output (string, 64 to 73.2k chars) | input (string, 208 to 73.3k chars) | instruction (string, 1 distinct value)
---|---|---|
#fixed code
@Test
/**
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception,
TimeoutException {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTime(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new String("");
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing.get());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing.get());
} | #vulnerable code
@Test
/**
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception,
TimeoutException {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTime(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new String("");
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
Future<Void> future = executorService.submit(syncRefreshCallable);
// let the Thread running syncRefreshCallable() have a turn so that it can initiate the call
// to refreshAccessToken().
Thread.yield();
synchronized(lock) {
lock.notifyAll();
}
// Try to get the access token, which should be calculated at this point. There's a
// possibility that some hanging occurs in the test code, so time out after 1 second;
// if the operation takes longer than that, future.get() will throw a TimeoutException.
future.get(1, TimeUnit.SECONDS);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing.get());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
future = executorService.submit(syncRefreshCallable);
// Let the asyncRefreshes do their thing.
Thread.yield();
// At this point there should be a single thread, kicked off by the underTest.asyncRefresh()
// calls, that is actually doing a refresh; the others will have seen that a refresh is in
// progress and finished without performing a refresh(). Make sure that at least one refresh
// process is in progress.
Assert.assertTrue(underTest.isRefreshing.get());
synchronized(lock) {
// Release the lock so that all of the async refreshing can complete.
lock.notifyAll();
}
// Wait for no more than a second to make sure that the call to underTest.syncRefresh()
// completes properly. If a second passes without syncRefresh() completing, future.get(..)
// will throw a TimeoutException.
future.get(1, TimeUnit.SECONDS);
Assert.assertFalse(underTest.isRefreshing.get());
}
#location 15
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
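The fixed version above delegates the submit/notify/await choreography to a syncCall(lock, callable) helper that this row references but never shows. Below is a minimal sketch of what such a helper could look like, written against the same executorService field and one-second timeout used by the vulnerable version; the body is an assumption for illustration, not the repository's actual helper.

// Hypothetical helper: runs the callable on another thread, repeatedly wakes the mocked
// refreshAccessToken() that is blocked in lock.wait(), and bounds the whole exchange so a
// regression surfaces as a TimeoutException instead of a hung test.
private void syncCall(final Object lock, Callable<Void> callable) throws Exception {
  Future<Void> future = executorService.submit(callable);
  long deadline = System.currentTimeMillis() + 1000;
  while (!future.isDone() && System.currentTimeMillis() < deadline) {
    synchronized (lock) {
      lock.notifyAll();
    }
    Thread.sleep(10);
  }
  future.get(1, TimeUnit.SECONDS);
}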
#fixed code
@Test
public void testRetyableCheckAndMutateRow() throws Exception {
final CheckAndMutateRowRequest request = CheckAndMutateRowRequest.getDefaultInstance();
when(mockFuture.get()).thenReturn(CheckAndMutateRowResponse.getDefaultInstance());
underTest.checkAndMutateRow(request);
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
} | #vulnerable code
@Test
public void testRetyableCheckAndMutateRow() throws InterruptedException {
final CheckAndMutateRowRequest request = CheckAndMutateRowRequest.getDefaultInstance();
final AtomicBoolean done = new AtomicBoolean(false);
executor.submit(new Callable<Void>(){
@Override
public Void call() throws Exception {
underTest.checkAndMutateRow(request);
done.set(true);
synchronized (done) {
done.notify();
}
return null;
}
});
Thread.sleep(100);
future.set(CheckAndMutateRowResponse.getDefaultInstance());
synchronized (done) {
done.wait(1000);
}
assertTrue(done.get());
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
underTest.syncRefresh();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
} | #vulnerable code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
final Future<?> waiter;
synchronized (underTest.lock) {
waiter = underTest.isRefreshing ? underTest.futureToken : Futures.immediateFuture(null);
}
waiter.get();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 37
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
underTest.syncRefresh();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
} | #vulnerable code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
final Future<?> waiter;
synchronized (underTest.lock) {
waiter = underTest.isRefreshing ? underTest.futureToken : Futures.immediateFuture(null);
}
waiter.get();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 33
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testPartialResults() throws Exception {
byte[] key1 = randomBytes(8);
byte[] key2 = randomBytes(8);
FlatRow response1 =
FlatRow.newBuilder()
.withRowKey(ByteString.copyFrom(key1))
.addCell(
new Cell(
"cf",
ByteString.EMPTY,
10,
ByteString.copyFromUtf8("hi!"),
new ArrayList<String>()))
.build();
RuntimeException exception = new RuntimeException("Something bad happened");
when(mockBulkRead.add(any(Query.class)))
.thenReturn(ApiFutures.immediateFuture(response1))
.thenReturn(ApiFutures.<FlatRow>immediateFailedFuture(exception));
List<Get> gets = Arrays.asList(new Get(key1), new Get(key2));
Object[] results = new Object[2];
try {
createExecutor().batch(gets, results);
} catch (RetriesExhaustedWithDetailsException ignored) {
}
Assert.assertTrue("first result is a result", results[0] instanceof Result);
Assert.assertTrue(Bytes.equals(((Result) results[0]).getRow(), key1));
Assert.assertEquals(exception, results[1]);
} | #vulnerable code
@Test
public void testPartialResults() throws Exception {
byte[] key1 = randomBytes(8);
byte[] key2 = randomBytes(8);
FlatRow response1 =
FlatRow.newBuilder()
.withRowKey(ByteString.copyFrom(key1))
.addCell(
new Cell(
"cf",
ByteString.EMPTY,
10,
ByteString.copyFromUtf8("hi!"),
new ArrayList<String>()))
.build();
RuntimeException exception = new RuntimeException("Something bad happened");
when(mockBulkRead.add(any(Query.class)))
.thenReturn(ApiFutures.immediateFuture(response1))
.thenReturn(ApiFutures.<FlatRow>immediateFailedFuture(exception));
List<Get> gets = Arrays.asList(new Get(key1), new Get(key2));
Object[] results = new Object[2];
try {
createExecutor(options).batch(gets, results);
} catch (RetriesExhaustedWithDetailsException ignored) {
}
Assert.assertTrue("first result is a result", results[0] instanceof Result);
Assert.assertTrue(Bytes.equals(((Result) results[0]).getRow(), key1));
Assert.assertEquals(exception, results[1]);
}
#location 26
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testSyncRefresh() throws IOException {
Mockito.when(mockCredentials.refreshAccessToken()).thenReturn(
new AccessToken("", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1)));
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, mockCredentials);
Assert.assertEquals(CacheState.Expired, underTest.getCacheState());
underTest.getHeaderSafe();
Assert.assertNotEquals(CacheState.Exception, underTest.getCacheState());
Assert.assertEquals(CacheState.Good, underTest.getCacheState());
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
public void testSyncRefresh() throws IOException {
initialize(HeaderCacheElement.TOKEN_STALENESS_MS + 1);
Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState());
Assert.assertFalse(underTest.isRefreshing());
}
#location 4
#vulnerability type UNSAFE_GUARDED_BY_ACCESS | Below is the vulnerable code, please generate the patch based on the following information. |
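The UNSAFE_GUARDED_BY_ACCESS rows in this dataset share one pattern: the fixed test stops reading @GuardedBy state such as headerCache and isRefreshing directly and goes through accessor methods on the interceptor instead. A minimal sketch of such accessors, assuming the lock, headerCache, and isRefreshing names that appear in the vulnerable variants elsewhere in this file (the method bodies are an illustration, not the library's actual code):

// Hypothetical accessors on RefreshingOAuth2CredentialsInterceptor that keep every read of
// the guarded fields under the same lock the refresh logic uses for writes.
CacheState getCacheState() {
  synchronized (lock) {
    return headerCache.getCacheState();
  }
}

boolean isRefreshing() {
  synchronized (lock) {
    return isRefreshing;
  }
}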
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 51
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void flush() throws IOException {
// If there is a bulk mutation in progress, then send it.
if (bulkMutation != null) {
try {
bulkMutation.flush();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException("flush() was interrupted", e);
}
}
} | #vulnerable code
public void flush() throws IOException {
// If there is a bulk mutation in progress, then send it.
if (bulkMutation != null) {
try {
bulkMutation.flush();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException("flush() was interrupted", e);
}
}
asyncExecutor.flush();
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
/**
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception,
TimeoutException {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTime(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new String("");
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing.get());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing.get());
} | #vulnerable code
@Test
/**
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception,
TimeoutException {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTime(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new String("");
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
Future<Void> future = executorService.submit(syncRefreshCallable);
// let the Thread running syncRefreshCallable() have a turn so that it can initiate the call
// to refreshAccessToken().
Thread.yield();
synchronized(lock) {
lock.notifyAll();
}
// Try to get the access token, which should be calculated at this point. There's a
// possibility that some hanging occurs in the test code, so time out after 1 second;
// if the operation takes longer than that, future.get() will throw a TimeoutException.
future.get(1, TimeUnit.SECONDS);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale,
RefreshingOAuth2CredentialsInterceptor.getCacheState(underTest.headerCache.get()));
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing.get());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
future = executorService.submit(syncRefreshCallable);
// Let the asyncRefreshes do their thing.
Thread.yield();
// At this point there should be a single thread, kicked off by the underTest.asyncRefresh()
// calls, that is actually doing a refresh; the others will have seen that a refresh is in
// progress and finished without performing a refresh(). Make sure that at least one refresh
// process is in progress.
Assert.assertTrue(underTest.isRefreshing.get());
synchronized(lock) {
// Release the lock so that all of the async refreshing can complete.
lock.notifyAll();
}
// Wait for no more than a second to make sure that the call to underTest.syncRefresh()
// completes properly. If a second passes without syncRefresh() completing, future.get(..)
// will throw a TimeoutException.
future.get(1, TimeUnit.SECONDS);
Assert.assertFalse(underTest.isRefreshing.get());
}
#location 79
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testStaleAndExpired() throws IOException {
long expiration = HeaderCacheElement.TOKEN_STALENESS_MS + 1;
initialize(expiration);
Assert.assertEquals(CacheState.Good, underTest.getCacheState());
long startTime = 2L;
setTimeInMillieconds(startTime);
Assert.assertEquals(CacheState.Stale, underTest.getCacheState());
long expiredStaleDiff =
HeaderCacheElement.TOKEN_STALENESS_MS - HeaderCacheElement.TOKEN_EXPIRES_MS;
setTimeInMillieconds(startTime + expiredStaleDiff);
Assert.assertEquals(CacheState.Expired, underTest.getCacheState());
} | #vulnerable code
@Test
public void testStaleAndExpired() throws IOException {
long expiration = HeaderCacheElement.TOKEN_STALENESS_MS + 1;
initialize(expiration);
Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState());
long startTime = 2L;
setTimeInMillieconds(startTime);
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
long expiredStaleDiff =
HeaderCacheElement.TOKEN_STALENESS_MS - HeaderCacheElement.TOKEN_EXPIRES_MS;
setTimeInMillieconds(startTime + expiredStaleDiff);
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
}
#location 5
#vulnerability type UNSAFE_GUARDED_BY_ACCESS | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testOptionsAreConstructedWithValidInput() throws IOException {
configuration.set(BigtableOptionsFactory.BIGTABLE_HOST_KEY, TEST_HOST);
configuration.setBoolean(BigtableOptionsFactory.BIGTABLE_USE_SERVICE_ACCOUNTS_KEY, false);
configuration.setBoolean(BigtableOptionsFactory.BIGTABLE_NULL_CREDENTIAL_ENABLE_KEY, true);
configuration.setBoolean(BigtableOptionsFactory.ALLOW_NO_TIMESTAMP_RETRIES_KEY, true);
configuration.setLong(BIGTABLE_BUFFERED_MUTATOR_MAX_MEMORY_KEY, 100_000L);
BigtableHBaseSettings settings = BigtableHBaseSettings.create(configuration);
assertTrue(settings.isRetriesWithoutTimestampAllowed());
BigtableOptions options = ((BigtableHBaseClassicSettings) settings).getBigtableOptions();
assertEquals(TEST_HOST, options.getDataHost());
assertEquals(TEST_PROJECT_ID, options.getProjectId());
assertEquals(TEST_INSTANCE_ID, options.getInstanceId());
assertEquals(100_000L, options.getBulkOptions().getMaxMemory());
} | #vulnerable code
@Test
public void testOptionsAreConstructedWithValidInput() throws IOException {
configuration.set(BigtableOptionsFactory.BIGTABLE_HOST_KEY, TEST_HOST);
configuration.setBoolean(BigtableOptionsFactory.BIGTABLE_USE_SERVICE_ACCOUNTS_KEY, false);
configuration.setBoolean(BigtableOptionsFactory.BIGTABLE_NULL_CREDENTIAL_ENABLE_KEY, true);
configuration.setLong(BIGTABLE_BUFFERED_MUTATOR_MAX_MEMORY_KEY, 100_000L);
BigtableOptions options =
((BigtableHBaseClassicSettings) BigtableHBaseSettings.create(configuration))
.getBigtableOptions();
assertEquals(TEST_HOST, options.getDataHost());
assertEquals(TEST_PROJECT_ID, options.getProjectId());
assertEquals(TEST_INSTANCE_ID, options.getInstanceId());
assertEquals(100_000L, options.getBulkOptions().getMaxMemory());
}
#location 12
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testRetyableCheckAndMutateRow() throws Exception {
final CheckAndMutateRowRequest request = CheckAndMutateRowRequest.getDefaultInstance();
when(mockFuture.get()).thenReturn(CheckAndMutateRowResponse.getDefaultInstance());
underTest.checkAndMutateRow(request);
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
} | #vulnerable code
@Test
public void testRetyableCheckAndMutateRow() throws InterruptedException {
final CheckAndMutateRowRequest request = CheckAndMutateRowRequest.getDefaultInstance();
final AtomicBoolean done = new AtomicBoolean(false);
executor.submit(new Callable<Void>(){
@Override
public Void call() throws Exception {
underTest.checkAndMutateRow(request);
done.set(true);
synchronized (done) {
done.notify();
}
return null;
}
});
Thread.sleep(100);
future.set(CheckAndMutateRowResponse.getDefaultInstance());
synchronized (done) {
done.wait(1000);
}
assertTrue(done.get());
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
if (status.getCode() == Status.Code.CANCELLED
&& status.getDescription() != null
&& status.getDescription().contains(TIMEOUT_CANCEL_MSG)) {
// If this was canceled because of handleTimeout(). The cancel is immediately retried or
// completed in another fashion.
return;
}
super.onClose(status, trailers);
} | #vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
if (status.getCode() == Status.Code.CANCELLED
&& status.getDescription().contains(TIMEOUT_CANCEL_MSG)) {
// If this was canceled because of handleTimeout(). The cancel is immediately retried or
// completed in another fashion.
return;
}
super.onClose(status, trailers);
}
#location 4
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testMutateRowPredicate() {
Predicate<MutateRowRequest> defaultPredicate = BigtableDataGrpcClient.IS_RETRYABLE_MUTATION;
createClient(true);
Predicate<MutateRowRequest> allowNoTimestampsPredicate =
predicates.get(BigtableServiceGrpc.METHOD_MUTATE_ROW.getFullMethodName());
assertFalse(defaultPredicate.apply(null));
assertTrue(allowNoTimestampsPredicate.apply(null));
MutateRowRequest noDataRequest = MutateRowRequest.getDefaultInstance();
assertTrue(defaultPredicate.apply(noDataRequest));
assertTrue(allowNoTimestampsPredicate.apply(noDataRequest));
MutateRowRequest requestWithCells = MutateRowRequest.newBuilder()
.addMutations(Mutation.newBuilder().setSetCell(SetCell.newBuilder().setTimestampMicros(-1)))
.build();
assertFalse(defaultPredicate.apply(requestWithCells));
assertTrue(allowNoTimestampsPredicate.apply(requestWithCells));
} | #vulnerable code
@Test
public void testMutateRowPredicate() {
Predicate<MutateRowRequest> predicate = BigtableDataGrpcClient.IS_RETRYABLE_MUTATION;
assertFalse(predicate.apply(null));
MutateRowRequest.Builder request = MutateRowRequest.newBuilder();
assertTrue(predicate.apply(request.build()));
request.addMutations(
Mutation.newBuilder().setSetCell(SetCell.newBuilder().setTimestampMicros(-1)));
assertFalse(predicate.apply(request.build()));
}
#location 9
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
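The NULL_DEREFERENCE fix above expects both predicates to tolerate a null request: the default predicate rejects it while the timestamp-lenient one accepts it. A null-safe predicate satisfying those assertions could be sketched as follows; the allowNoTimestamps flag is a hypothetical stand-in for whatever distinguishes the two predicates, and the body is an illustration rather than the library's actual implementation.

// Sketch of a null-safe retryability check: guard before dereferencing the request.
Predicate<MutateRowRequest> isRetryableMutation = new Predicate<MutateRowRequest>() {
  @Override
  public boolean apply(MutateRowRequest request) {
    if (request == null) {
      // Only the lenient predicate treats a missing request as retryable.
      return allowNoTimestamps;
    }
    for (Mutation mutation : request.getMutationsList()) {
      // A server-assigned timestamp (-1) makes the mutation non-idempotent, so it is
      // retryable only when timestamp-less retries are explicitly allowed.
      if (mutation.getSetCell().getTimestampMicros() == -1 && !allowNoTimestamps) {
        return false;
      }
    }
    return true;
  }
};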
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic don't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for a refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 58
#vulnerability type UNSAFE_GUARDED_BY_ACCESS | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testSyncRefresh() throws IOException {
initialize(HeaderCacheElement.TOKEN_STALENESS_MS + 1);
Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState());
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
public void testSyncRefresh() throws IOException {
initialize(HeaderCacheElement.TOKEN_STALENESS_MS + 1);
Assert.assertEquals(CacheState.Good, underTest.headerCache.getCacheState());
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testBatchBulkGets() throws Exception {
final List<Get> gets = new ArrayList<>(10);
final List<ApiFuture<FlatRow>> expected = new ArrayList<>(10);
gets.add(new Get(Bytes.toBytes("key0")));
expected.add(ApiFutures.<FlatRow>immediateFuture(null));
for (int i = 1; i < 10; i++) {
byte[] row_key = randomBytes(8);
gets.add(new Get(row_key));
ByteString key = ByteStringer.wrap(row_key);
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
expected.add(
ApiFutures.immediateFuture(
FlatRow.newBuilder()
.withRowKey(key)
.addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue)
.build()));
}
// Test 10 gets, but return only 9 to test the row not found case.
when(mockBulkRead.add(any(Query.class)))
.then(
new Answer<ApiFuture<FlatRow>>() {
final AtomicInteger counter = new AtomicInteger();
@Override
public ApiFuture<FlatRow> answer(InvocationOnMock invocation) throws Throwable {
return expected.get(counter.getAndIncrement());
}
});
ByteString key = ByteStringer.wrap(randomBytes(8));
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
FlatRow row =
FlatRow.newBuilder()
.withRowKey(key)
.addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue)
.build();
when(mockFuture.get()).thenReturn(row);
Result[] results = createExecutor().batch(gets);
verify(mockBulkRead, times(10)).add(any(Query.class));
verify(mockBulkRead, times(1)).flush();
Assert.assertTrue(matchesRow(Result.EMPTY_RESULT).matches(results[0]));
for (int i = 1; i < results.length; i++) {
Assert.assertTrue(
"Expected "
+ Bytes.toString(gets.get(i).getRow())
+ " but was "
+ Bytes.toString(results[i].getRow()),
Bytes.equals(results[i].getRow(), gets.get(i).getRow()));
}
} | #vulnerable code
@Test
public void testBatchBulkGets() throws Exception {
final List<Get> gets = new ArrayList<>(10);
final List<ApiFuture<FlatRow>> expected = new ArrayList<>(10);
gets.add(new Get(Bytes.toBytes("key0")));
expected.add(ApiFutures.<FlatRow>immediateFuture(null));
for (int i = 1; i < 10; i++) {
byte[] row_key = randomBytes(8);
gets.add(new Get(row_key));
ByteString key = ByteStringer.wrap(row_key);
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
expected.add(
ApiFutures.immediateFuture(
FlatRow.newBuilder()
.withRowKey(key)
.addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue)
.build()));
}
// Test 10 gets, but return only 9 to test the row not found case.
when(mockBulkRead.add(any(Query.class)))
.then(
new Answer<ApiFuture<FlatRow>>() {
final AtomicInteger counter = new AtomicInteger();
@Override
public ApiFuture<FlatRow> answer(InvocationOnMock invocation) throws Throwable {
return expected.get(counter.getAndIncrement());
}
});
ByteString key = ByteStringer.wrap(randomBytes(8));
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
FlatRow row =
FlatRow.newBuilder()
.withRowKey(key)
.addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue)
.build();
when(mockFuture.get()).thenReturn(row);
BatchExecutor underTest = createExecutor(options);
Result[] results = underTest.batch(gets);
verify(mockBulkRead, times(10)).add(any(Query.class));
verify(mockBulkRead, times(1)).flush();
Assert.assertTrue(matchesRow(Result.EMPTY_RESULT).matches(results[0]));
for (int i = 1; i < results.length; i++) {
Assert.assertTrue(
"Expected "
+ Bytes.toString(gets.get(i).getRow())
+ " but was "
+ Bytes.toString(results[i].getRow()),
Bytes.equals(results[i].getRow(), gets.get(i).getRow()));
}
}
#location 42
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public void setMessageCompression(boolean enable) {
throw new UnsupportedOperationException("setMessageCompression()");
} | #vulnerable code
@Override
public void setMessageCompression(boolean enable) {
call.setMessageCompression(enable);
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
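The fix above sidesteps the race by refusing to delegate at all. If delegation were actually needed, a guarded alternative (a sketch only, reusing the callLock and call names that appear in the onClose row further down) would synchronize on the same lock that protects the call reference:

@Override
public void setMessageCompression(boolean enable) {
  synchronized (callLock) {
    // The call reference can be swapped or nulled out by retry handling, so only
    // delegate while a call is actually in flight.
    if (call != null) {
      call.setMessageCompression(enable);
    }
  }
}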
#fixed code
@Override
public void modifyTable(TableName tableName, TableDescriptor tableDescriptor) throws IOException {
super.modifyTable(tableName, new HTableDescriptor(tableDescriptor));
} | #vulnerable code
@Override
public void modifyTable(TableName tableName, TableDescriptor tableDescriptor) throws IOException {
if (isTableAvailable(tableName)) {
TableDescriptor currentTableDescriptor = getTableDescriptor(tableName);
List<Modification> modifications = new ArrayList<>();
List<HColumnDescriptor> columnDescriptors = tableAdapter2x.toHColumnDescriptors(tableDescriptor);
List<HColumnDescriptor> currentColumnDescriptors = tableAdapter2x.toHColumnDescriptors(currentTableDescriptor);
modifications.addAll(tableModificationAdapter.buildModifications(columnDescriptors, currentColumnDescriptors));
modifyColumn(tableName, "modifyTable", "update", (Modification[]) modifications.toArray());
} else {
throw new TableNotFoundException(tableName);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
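Besides the null dereference reported at getTableDescriptor(tableName), the vulnerable version above also misuses Collection.toArray(): the no-argument overload returns Object[], so the cast to Modification[] would fail with a ClassCastException at runtime. If the modification-based path were kept, the typed overload avoids that problem, for example:

// The typed overload returns a Modification[] directly, so no unchecked cast is needed.
Modification[] mods = modifications.toArray(new Modification[0]);
modifyColumn(tableName, "modifyTable", "update", mods);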
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
operationTimerContext.close();
}
} else {
onError(status, trailers);
}
} | #vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
Status.Code code = status.getCode();
// OK
if (code == Status.Code.OK) {
if (onOK()) {
operationTimerContext.close();
}
return;
}
// CANCELLED
if (code == Status.Code.CANCELLED) {
// An explicit user cancellation is not considered a failure.
operationTimerContext.close();
return;
}
// Non retry scenario
if (!retryOptions.enableRetries()
|| !retryOptions.isRetryable(code)
// Unauthenticated is special because the request never made it
// to the server, so all requests are retryable
|| !(isRequestRetryable() || code == Code.UNAUTHENTICATED)) {
rpc.getRpcMetrics().markFailure();
operationTimerContext.close();
setException(status.asRuntimeException());
return;
}
// Attempt retry with backoff
long nextBackOff = getNextBackoff();
failedCount += 1;
// Backoffs timed out.
if (nextBackOff == BackOff.STOP) {
rpc.getRpcMetrics().markRetriesExhasted();
operationTimerContext.close();
String message = String.format("Exhausted retries after %d failures.", failedCount);
StatusRuntimeException cause = status.asRuntimeException();
setException(new BigtableRetriesExhaustedException(message, cause));
return;
} else {
String channelId = ChannelPool.extractIdentifier(trailers);
LOG.info("Retrying failed call. Failure #%d, got: %s on channel %s",
status.getCause(), failedCount, status, channelId);
}
performRetry(nextBackOff);
}
#location 39
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testChannelsAreRoundRobinned() throws IOException {
MockChannelFactory factory = new MockChannelFactory();
MethodDescriptor descriptor = mock(MethodDescriptor.class);
MockitoAnnotations.initMocks(this);
ChannelPool pool = new ChannelPool(null, factory);
pool.ensureChannelCount(2);
pool.newCall(descriptor, CallOptions.DEFAULT);
verify(factory.channels.get(0), times(1)).newCall(same(descriptor), same(CallOptions.DEFAULT));
verify(factory.channels.get(1), times(0)).newCall(same(descriptor), same(CallOptions.DEFAULT));
pool.newCall(descriptor, CallOptions.DEFAULT);
verify(factory.channels.get(0), times(1)).newCall(same(descriptor), same(CallOptions.DEFAULT));
verify(factory.channels.get(1), times(1)).newCall(same(descriptor), same(CallOptions.DEFAULT));
} | #vulnerable code
@Test
public void testChannelsAreRoundRobinned() throws IOException {
MockChannelFactory factory = new MockChannelFactory();
MethodDescriptor descriptor = mock(MethodDescriptor.class);
MockitoAnnotations.initMocks(this);
ChannelPool pool = new ChannelPool(null, factory);
pool.ensureChannelCount(2);
pool.newCall(descriptor, CallOptions.DEFAULT);
verify(factory.channels.get(0), times(1)).newCall(same(descriptor), same(CallOptions.DEFAULT));
verify(factory.channels.get(1), times(0)).newCall(same(descriptor), same(CallOptions.DEFAULT));
pool.newCall(descriptor, CallOptions.DEFAULT);
verify(factory.channels.get(0), times(1)).newCall(same(descriptor), same(CallOptions.DEFAULT));
verify(factory.channels.get(1), times(1)).newCall(same(descriptor), same(CallOptions.DEFAULT));
}
#location 8
#vulnerability type RESOURCE_LEAK
#fixed code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
} | #vulnerable code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 20
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{
ServerSocket serverSocket = new ServerSocket(0);
final int availablePort = serverSocket.getLocalPort();
serverSocket.close();
//Creates non-ssl server.
createServer(availablePort);
BigtableOptions bigtableOptions =
BigtableOptions.builder()
.setDataHost("localhost")
.setAdminHost("localhost")
.setProjectId(TEST_PROJECT_ID)
.setInstanceId(TEST_INSTANCE_ID)
.setUserAgent(TEST_USER_AGENT)
.setUsePlaintextNegotiation(true)
.setCredentialOptions(CredentialOptions.nullCredential())
.setPort(availablePort)
.build();
xGoogApiPattern = Pattern.compile(".* cbt/.*");
try (BigtableSession session = new BigtableSession(bigtableOptions)) {
session.getDataClient()
.readFlatRows(ReadRowsRequest.getDefaultInstance()).next();
Assert.assertTrue(serverPasses.get());
}
} | #vulnerable code
@Test
public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{
ServerSocket serverSocket = new ServerSocket(0);
final int availablePort = serverSocket.getLocalPort();
serverSocket.close();
//Creates non-ssl server.
createServer(availablePort);
BigtableOptions bigtableOptions =
BigtableOptions.builder()
.setDataHost("localhost")
.setAdminHost("localhost")
.setProjectId(TEST_PROJECT_ID)
.setInstanceId(TEST_INSTANCE_ID)
.setUserAgent(TEST_USER_AGENT)
.setUsePlaintextNegotiation(true)
.setCredentialOptions(CredentialOptions.nullCredential())
.setPort(availablePort)
.build();
xGoogApiPattern = Pattern.compile(".* cbt/.*");
new BigtableSession(bigtableOptions).getDataClient()
.readFlatRows(ReadRowsRequest.getDefaultInstance()).next();
Assert.assertTrue(serverPasses.get());
}
#location 24
#vulnerability type NULL_DEREFERENCE
#fixed code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
} | #vulnerable code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
this.rowMerger = new RowMerger(rowObserver);
adapter = new CallToStreamObserverAdapter();
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
private void retryOnTimeout(ScanTimeoutException rte) throws BigtableRetriesExhaustedException {
LOG.info("The client could not get a response in %d ms. Retrying the scan.",
retryOptions.getReadPartialRowTimeoutMillis());
// Cancel the existing rpc.
cancel(TIMEOUT_CANCEL_MSG);
rpcTimerContext.close();
failedCount++;
// Can this request be retried
int maxRetries = retryOptions.getMaxScanTimeoutRetries();
if (retryOptions.enableRetries() && ++timeoutRetryCount <= maxRetries) {
resetStatusBasedBackoff();
performRetry(0);
} else {
throw getExhaustedRetriesException(Status.ABORTED);
}
} | #vulnerable code
private void retryOnTimeout(ScanTimeoutException rte) throws BigtableRetriesExhaustedException {
LOG.info("The client could not get a response in %d ms. Retrying the scan.",
retryOptions.getReadPartialRowTimeoutMillis());
// Cancel the existing rpc.
cancel(TIMEOUT_CANCEL_MSG);
rpcTimerContext.close();
failedCount++;
// Can this request be retried
int maxRetries = retryOptions.getMaxScanTimeoutRetries();
if (retryOptions.enableRetries() && ++timeoutRetryCount <= maxRetries) {
rpc.getRpcMetrics().markRetry();
resetStatusBasedBackoff();
run();
} else {
throw getExhaustedRetriesException(Status.ABORTED);
}
}
#location 15
#vulnerability type NULL_DEREFERENCE
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
try (Scope scope = TRACER.withSpan(operationSpan)) {
callWrapper.resetCall();
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
} catch (Exception e) {
setException(e);
}
} | #vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
try (Scope scope = TRACER.withSpan(operationSpan)) {
synchronized (callLock) {
call = NULL_CALL;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
} catch (Exception e) {
setException(e);
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
public static Credentials getCredentials(CredentialOptions options)
throws IOException, GeneralSecurityException {
return patchCredentials(getCredentialsInner(options));
} | #vulnerable code
public static Credentials getCredentials(CredentialOptions options)
throws IOException, GeneralSecurityException {
switch (options.getCredentialType()) {
case DefaultCredentials:
return getApplicationDefaultCredential();
case P12:
P12CredentialOptions p12Options = (P12CredentialOptions) options;
return getCredentialFromPrivateKeyServiceAccount(
p12Options.getServiceAccount(), p12Options.getKeyFile());
case SuppliedCredentials:
return ((UserSuppliedCredentialOptions) options).getCredential();
case SuppliedJson:
JsonCredentialsOptions jsonCredentialsOptions = (JsonCredentialsOptions) options;
synchronized (jsonCredentialsOptions) {
if (jsonCredentialsOptions.getCachedCredentials() == null) {
jsonCredentialsOptions.setCachedCredentails(
getInputStreamCredential(jsonCredentialsOptions.getInputStream()));
}
return jsonCredentialsOptions.getCachedCredentials();
}
case None:
return null;
default:
throw new IllegalStateException(
"Cannot process Credential type: " + options.getCredentialType());
}
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
public void awaitCompletion() throws InterruptedException {
boolean performedWarning = false;
lock.lock();
try {
while (!isFlushed()) {
flushedCondition.await(finishWaitMillis, TimeUnit.MILLISECONDS);
long now = clock.nanoTime();
if (now >= noSuccessWarningDeadlineNanos) {
logNoSuccessWarning(now);
resetNoSuccessWarningDeadline();
performedWarning = true;
}
}
if (performedWarning) {
LOG.info("awaitCompletion() completed");
}
} finally {
lock.unlock();
}
} | #vulnerable code
public void awaitCompletion() throws InterruptedException {
boolean performedWarning = false;
lock.lock();
try {
while (!isFlushed()) {
flushedCondition.await(finishWaitMillis, TimeUnit.MILLISECONDS);
long now = clock.nanoTime();
if (now >= noSuccessWarningDeadline) {
logNoSuccessWarning(now);
resetNoSuccessWarningDeadline();
performedWarning = true;
}
}
if (performedWarning) {
LOG.info("awaitCompletion() completed");
}
} finally {
lock.unlock();
}
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
underTest.syncRefresh();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
} | #vulnerable code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
final Future<?> waiter;
synchronized (underTest.lock) {
waiter = underTest.isRefreshing ? underTest.futureToken : Futures.immediateFuture(null);
}
waiter.get();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
} | #vulnerable code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
this.rowMerger = new RowMerger(rowObserver);
adapter = new CallToStreamObserverAdapter();
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testBatchBulkGets() throws Exception {
final List<Get> gets = new ArrayList<>(10);
final List<ApiFuture<Result>> expected = new ArrayList<>(10);
gets.add(new Get(Bytes.toBytes("key0")));
expected.add(ApiFutures.<Result>immediateFuture(null));
for (int i = 1; i < 10; i++) {
byte[] row_key = randomBytes(8);
gets.add(new Get(row_key));
ByteString key = ByteStringer.wrap(row_key);
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
expected.add(
ApiFutures.immediateFuture(
Result.create(
ImmutableList.<Cell>of(
new RowCell(
key.toByteArray(),
Bytes.toBytes("family"),
Bytes.toBytes(""),
System.nanoTime() / 1000,
cellValue.toByteArray())))));
}
// Test 10 gets, but return only 9 to test the row not found case.
when(mockBulkRead.add(any(Query.class)))
.then(
new Answer<ApiFuture<Result>>() {
final AtomicInteger counter = new AtomicInteger();
@Override
public ApiFuture<Result> answer(InvocationOnMock invocation) throws Throwable {
return expected.get(counter.getAndIncrement());
}
});
ByteString key = ByteStringer.wrap(randomBytes(8));
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
Result row =
Result.create(
ImmutableList.<Cell>of(
new RowCell(
key.toByteArray(),
Bytes.toBytes("family"),
Bytes.toBytes(""),
1000L,
cellValue.toByteArray())));
when(mockFuture.get()).thenReturn(row);
Result[] results = createExecutor().batch(gets);
verify(mockBulkRead, times(10)).add(any(Query.class));
verify(mockBulkRead, times(1)).flush();
assertTrue(matchesRow(Result.EMPTY_RESULT).matches(results[0]));
for (int i = 1; i < results.length; i++) {
assertTrue(
"Expected "
+ Bytes.toString(gets.get(i).getRow())
+ " but was "
+ Bytes.toString(results[i].getRow()),
Bytes.equals(results[i].getRow(), gets.get(i).getRow()));
}
} | #vulnerable code
@Test
public void testBatchBulkGets() throws Exception {
final List<Get> gets = new ArrayList<>(10);
final List<ApiFuture<FlatRow>> expected = new ArrayList<>(10);
gets.add(new Get(Bytes.toBytes("key0")));
expected.add(ApiFutures.<FlatRow>immediateFuture(null));
for (int i = 1; i < 10; i++) {
byte[] row_key = randomBytes(8);
gets.add(new Get(row_key));
ByteString key = ByteStringer.wrap(row_key);
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
expected.add(
ApiFutures.immediateFuture(
FlatRow.newBuilder()
.withRowKey(key)
.addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue)
.build()));
}
// Test 10 gets, but return only 9 to test the row not found case.
when(mockBulkRead.add(any(Query.class)))
.then(
new Answer<ApiFuture<FlatRow>>() {
final AtomicInteger counter = new AtomicInteger();
@Override
public ApiFuture<FlatRow> answer(InvocationOnMock invocation) throws Throwable {
return expected.get(counter.getAndIncrement());
}
});
ByteString key = ByteStringer.wrap(randomBytes(8));
ByteString cellValue = ByteString.copyFrom(randomBytes(8));
FlatRow row =
FlatRow.newBuilder()
.withRowKey(key)
.addCell("family", ByteString.EMPTY, System.nanoTime() / 1000, cellValue)
.build();
when(mockFuture.get()).thenReturn(row);
Result[] results = createExecutor().batch(gets);
verify(mockBulkRead, times(10)).add(any(Query.class));
verify(mockBulkRead, times(1)).flush();
Assert.assertTrue(matchesRow(Result.EMPTY_RESULT).matches(results[0]));
for (int i = 1; i < results.length; i++) {
Assert.assertTrue(
"Expected "
+ Bytes.toString(gets.get(i).getRow())
+ " but was "
+ Bytes.toString(results[i].getRow()),
Bytes.equals(results[i].getRow(), gets.get(i).getRow()));
}
}
#location 41
#vulnerability type RESOURCE_LEAK
#fixed code
@Test
public void testPartialResults() throws Exception {
when(mockBigtableApi.getDataClient()).thenReturn(mockDataClientWrapper);
when(mockDataClientWrapper.createBulkRead(isA(String.class))).thenReturn(mockBulkRead);
byte[] key1 = randomBytes(8);
byte[] key2 = randomBytes(8);
Result expected =
Result.create(
ImmutableList.<org.apache.hadoop.hbase.Cell>of(
new RowCell(
key1,
Bytes.toBytes("cf"),
Bytes.toBytes(""),
10,
Bytes.toBytes("hi!"),
ImmutableList.<String>of())));
RuntimeException exception = new RuntimeException("Something bad happened");
when(mockBulkRead.add(any(Query.class)))
.thenReturn(ApiFutures.immediateFuture(expected))
.thenReturn(ApiFutures.<Result>immediateFailedFuture(exception));
List<Get> gets = Arrays.asList(new Get(key1), new Get(key2));
Object[] results = new Object[2];
try {
createExecutor().batch(gets, results);
} catch (RetriesExhaustedWithDetailsException ignored) {
}
assertTrue("first result is a result", results[0] instanceof Result);
assertTrue(matchesRow(expected).matches(results[0]));
Assert.assertEquals(exception, results[1]);
} | #vulnerable code
@Test
public void testPartialResults() throws Exception {
when(mockBigtableApi.getDataClient()).thenReturn(mockDataClientWrapper);
when(mockDataClientWrapper.createBulkRead(isA(String.class))).thenReturn(mockBulkRead);
byte[] key1 = randomBytes(8);
byte[] key2 = randomBytes(8);
Result result =
Result.create(
ImmutableList.<org.apache.hadoop.hbase.Cell>of(
new RowCell(
key1,
"cf".getBytes(),
"".getBytes(),
10,
"hi!".getBytes(),
ImmutableList.<String>of())));
RuntimeException exception = new RuntimeException("Something bad happened");
when(mockBulkRead.add(any(Query.class)))
.thenReturn(ApiFutures.immediateFuture(result))
.thenReturn(ApiFutures.<Result>immediateFailedFuture(exception));
List<Get> gets = Arrays.asList(new Get(key1), new Get(key2));
Object[] results = new Object[2];
try {
createExecutor().batch(gets, results);
} catch (RetriesExhaustedWithDetailsException ignored) {
}
Assert.assertTrue("first result is a result", results[0] instanceof Result);
Assert.assertArrayEquals(key1, ((Result) results[0]).getRow());
Assert.assertEquals(exception, results[1]);
}
#location 26
#vulnerability type RESOURCE_LEAK
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
try (Scope scope = TRACER.withSpan(operationSpan)) {
callWrapper.resetCall();
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
} catch (Exception e) {
setException(e);
}
} | #vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
try (Scope scope = TRACER.withSpan(operationSpan)) {
synchronized (callLock) {
call = NULL_CALL;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
} catch (Exception e) {
setException(e);
}
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{
ServerSocket serverSocket = new ServerSocket(0);
final int availablePort = serverSocket.getLocalPort();
serverSocket.close();
//Creates non-ssl server.
createServer(availablePort);
BigtableOptions bigtableOptions =
BigtableOptions.builder()
.setDataHost("localhost")
.setAdminHost("localhost")
.setProjectId(TEST_PROJECT_ID)
.setInstanceId(TEST_INSTANCE_ID)
.setUserAgent(TEST_USER_AGENT)
.setUsePlaintextNegotiation(true)
.setCredentialOptions(CredentialOptions.nullCredential())
.setPort(availablePort)
.build();
xGoogApiPattern = Pattern.compile(".* cbt/.*");
try (BigtableSession session = new BigtableSession(bigtableOptions)) {
session.getDataClient()
.readFlatRows(ReadRowsRequest.getDefaultInstance()).next();
Assert.assertTrue(serverPasses.get());
}
} | #vulnerable code
@Test
public void testCBC_UserAgentUsingPlainTextNegotiation() throws Exception{
ServerSocket serverSocket = new ServerSocket(0);
final int availablePort = serverSocket.getLocalPort();
serverSocket.close();
//Creates non-ssl server.
createServer(availablePort);
BigtableOptions bigtableOptions =
BigtableOptions.builder()
.setDataHost("localhost")
.setAdminHost("localhost")
.setProjectId(TEST_PROJECT_ID)
.setInstanceId(TEST_INSTANCE_ID)
.setUserAgent(TEST_USER_AGENT)
.setUsePlaintextNegotiation(true)
.setCredentialOptions(CredentialOptions.nullCredential())
.setPort(availablePort)
.build();
xGoogApiPattern = Pattern.compile(".* cbt/.*");
new BigtableSession(bigtableOptions).getDataClient()
.readFlatRows(ReadRowsRequest.getDefaultInstance()).next();
Assert.assertTrue(serverPasses.get());
}
#location 23
#vulnerability type RESOURCE_LEAK
#fixed code
private Result[] batch(final List<? extends org.apache.hadoop.hbase.client.Row> actions)
throws Exception {
return createExecutor().batch(actions);
} | #vulnerable code
private Result[] batch(final List<? extends org.apache.hadoop.hbase.client.Row> actions)
throws Exception {
return createExecutor(options).batch(actions);
}
#location 3
#vulnerability type RESOURCE_LEAK
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
operationTimerContext.close();
}
} else {
onError(status, trailers);
}
} | #vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
Status.Code code = status.getCode();
// OK
if (code == Status.Code.OK) {
if (onOK()) {
operationTimerContext.close();
}
return;
}
// CANCELLED
if (code == Status.Code.CANCELLED) {
// An explicit user cancellation is not considered a failure.
operationTimerContext.close();
return;
}
// Non retry scenario
if (!retryOptions.enableRetries()
|| !retryOptions.isRetryable(code)
// Unauthenticated is special because the request never made it to
// to the server, so all requests are retryable
|| !(isRequestRetryable() || code == Code.UNAUTHENTICATED)) {
rpc.getRpcMetrics().markFailure();
operationTimerContext.close();
setException(status.asRuntimeException());
return;
}
// Attempt retry with backoff
long nextBackOff = getNextBackoff();
failedCount += 1;
// Backoffs timed out.
if (nextBackOff == BackOff.STOP) {
rpc.getRpcMetrics().markRetriesExhasted();
operationTimerContext.close();
String message = String.format("Exhausted retries after %d failures.", failedCount);
StatusRuntimeException cause = status.asRuntimeException();
setException(new BigtableRetriesExhaustedException(message, cause));
return;
} else {
String channelId = ChannelPool.extractIdentifier(trailers);
LOG.info("Retrying failed call. Failure #%d, got: %s on channel %s",
status.getCause(), failedCount, status, channelId);
}
performRetry(nextBackOff);
}
#location 53
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testRetyableMutateRow() throws Exception {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
when(mockFuture.get()).thenReturn(Empty.getDefaultInstance());
underTest.mutateRow(request);
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
} | #vulnerable code
@Test
public void testRetyableMutateRow() throws InterruptedException {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
final AtomicBoolean done = new AtomicBoolean(false);
executor.submit(new Callable<Void>(){
@Override
public Void call() throws Exception {
underTest.mutateRow(request);
done.set(true);
synchronized (done) {
done.notify();
}
return null;
}
});
Thread.sleep(100);
future.set(MutateRowsResponse.getDefaultInstance());
synchronized (done) {
done.wait(1000);
}
assertTrue(done.get());
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testGetCallback() throws Exception {
when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture);
byte[] key = randomBytes(8);
FlatRow response = FlatRow.newBuilder().withRowKey(ByteString.copyFrom(key)).build();
setFuture(ImmutableList.of(response));
final Callback<Result> callback = Mockito.mock(Callback.class);
List<Get> gets = Arrays.asList(new Get(key));
createExecutor().batchCallback(gets, new Object[1], callback);
verify(callback, times(1))
.update(
same(BatchExecutor.NO_REGION),
same(key),
argThat(matchesRow(Adapters.FLAT_ROW_ADAPTER.adaptResponse(response))));
} | #vulnerable code
@Test
public void testGetCallback() throws Exception {
when(mockBulkRead.add(any(Query.class))).thenReturn(mockFuture);
byte[] key = randomBytes(8);
FlatRow response = FlatRow.newBuilder().withRowKey(ByteString.copyFrom(key)).build();
setFuture(ImmutableList.of(response));
final Callback<Result> callback = Mockito.mock(Callback.class);
List<Get> gets = Arrays.asList(new Get(key));
createExecutor(options).batchCallback(gets, new Object[1], callback);
verify(callback, times(1))
.update(
same(BatchExecutor.NO_REGION),
same(key),
argThat(matchesRow(Adapters.FLAT_ROW_ADAPTER.adaptResponse(response))));
}
#location 9
#vulnerability type RESOURCE_LEAK
#fixed code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
underTest.syncRefresh();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
} | #vulnerable code
@Test
public void testRefreshAfterStale() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken staleToken = new AccessToken("stale", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
AccessToken goodToken = new AccessToken("good", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 11));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will setup a stale token
.thenReturn(staleToken)
// Second call will give a good token
.thenReturn(goodToken);
// First call - setup
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, firstResult.getCacheState());
Assert.assertThat(firstResult.header, containsString("stale"));
// Fast forward until token is stale
setTimeInMillieconds(10);
// Second call - return stale token, but schedule refresh
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Stale, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("stale"));
// Wait for the refresh to finish
final Future<?> waiter;
synchronized (underTest.lock) {
waiter = underTest.isRefreshing ? underTest.futureToken : Futures.immediateFuture(null);
}
waiter.get();
// Third call - now returns good token
HeaderCacheElement thirdResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, thirdResult.getCacheState());
Assert.assertThat(thirdResult.header, containsString("good"));
// Make sure that the token was only requested twice: once for the stale token & second time for the good token
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 22
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Override
protected boolean onOK(Metadata trailers) {
ProcessingStatus status = requestManager.onOK();
if (status == ProcessingStatus.INVALID) {
// Set an exception.
onError(INVALID_RESPONSE, trailers);
return true;
}
// There was a problem in the data found in onMessage(), so fail the RPC.
if (status == ProcessingStatus.SUCCESS || status == ProcessingStatus.NOT_RETRYABLE) {
// Set the response, with either success, or non-retryable responses.
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
return true;
}
// Perform a partial retry, if the backoff policy allows it.
Long nextBackOff = getNextBackoff();
if (nextBackOff == null) {
// Return the response as is, and don't retry;
rpc.getRpcMetrics().markRetriesExhasted();
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("failureCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return true;
}
performRetry(nextBackOff);
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("retryCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return false;
} | #vulnerable code
@Override
protected boolean onOK(Metadata trailers) {
ProcessingStatus status = requestManager.onOK();
if (status == ProcessingStatus.INVALID) {
// Set an exception.
onError(INVALID_RESPONSE, trailers);
return true;
}
// There was a problem in the data found in onMessage(), so fail the RPC.
if (status == ProcessingStatus.SUCCESS || status == ProcessingStatus.NOT_RETRYABLE) {
// Set the response, with either success, or non-retryable responses.
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
return true;
}
// Perform a partial retry, if the backoff policy allows it.
long nextBackOff = getNextBackoff();
if (nextBackOff == BackOff.STOP) {
// Return the response as is, and don't retry;
rpc.getRpcMetrics().markRetriesExhasted();
completionFuture.set(Arrays.asList(requestManager.buildResponse()));
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("failureCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return true;
}
performRetry(nextBackOff);
operationSpan.addAnnotation("MutationCount", ImmutableMap.of("retryCount",
AttributeValue.longAttributeValue(requestManager.getRetryRequest().getEntriesCount())));
return false;
}
#location 19
#vulnerability type NULL_DEREFERENCE
#fixed code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
} | #vulnerable code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 28
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Override
public String authority() {
return authority;
} | #vulnerable code
@Override
public String authority() {
return delegate.authority();
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
try (NonThrowingCloseable s = TRACER.withSpan(operationSpan)) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
}
} | #vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
operationTimerContext.close();
}
} else {
onError(status, trailers);
}
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
public void awaitCompletion() throws InterruptedException {
boolean performedWarning = false;
lock.lock();
try {
while (!isFlushed()) {
flushedCondition.await(finishWaitMillis, TimeUnit.MILLISECONDS);
long now = clock.nanoTime();
if (now >= noSuccessCheckDeadlineNanos) {
// There are unusual cases where an RPC could be completed, but we don't clean up
// the state and the locks. Try to clean up if there is a timeout.
for (RetryHandler retryHandler : outstandingRetries.values()) {
retryHandler.performRetryIfStale();
}
logNoSuccessWarning(now);
resetNoSuccessWarningDeadline();
performedWarning = true;
}
}
if (performedWarning) {
LOG.info("awaitCompletion() completed");
}
} finally {
lock.unlock();
}
} | #vulnerable code
public void awaitCompletion() throws InterruptedException {
boolean performedWarning = false;
lock.lock();
try {
while (!isFlushed()) {
flushedCondition.await(finishWaitMillis, TimeUnit.MILLISECONDS);
long now = clock.nanoTime();
if (now >= noSuccessWarningDeadlineNanos) {
logNoSuccessWarning(now);
resetNoSuccessWarningDeadline();
performedWarning = true;
}
}
if (performedWarning) {
LOG.info("awaitCompletion() completed");
}
} finally {
lock.unlock();
}
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testRetyableMutateRow() throws Exception {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
when(mockFuture.get()).thenReturn(Empty.getDefaultInstance());
underTest.mutateRow(request);
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
} | #vulnerable code
@Test
public void testRetyableMutateRow() throws InterruptedException {
final MutateRowRequest request = MutateRowRequest.getDefaultInstance();
final AtomicBoolean done = new AtomicBoolean(false);
executor.submit(new Callable<Void>(){
@Override
public Void call() throws Exception {
underTest.mutateRow(request);
done.set(true);
synchronized (done) {
done.notify();
}
return null;
}
});
Thread.sleep(100);
future.set(MutateRowsResponse.getDefaultInstance());
synchronized (done) {
done.wait(1000);
}
assertTrue(done.get());
verify(clientCallService, times(1)).listenableAsyncCall(any(ClientCall.class), same(request));
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
protected void cancel(final String message) {
callWrapper.cancel(message, null);
} | #vulnerable code
protected void cancel(final String message) {
call.cancel(message, null);
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
Future<HeaderCacheElement> asyncRefresh() {
LOG.trace("asyncRefresh");
synchronized (lock) {
try {
if (futureToken != null) {
return futureToken;
}
if (headerCache.getCacheState() == CacheState.Good) {
return Futures.immediateFuture(headerCache);
}
Future<HeaderCacheElement> future = executor.submit(new Callable<HeaderCacheElement>() {
@Override
public HeaderCacheElement call() throws Exception {
return updateToken();
}
});
if (!future.isDone()) {
this.futureToken = future;
}
return future;
} catch (RuntimeException e) {
futureToken = null;
LOG.warn("Got an unexpected exception while trying to refresh google credentials.", e);
return Futures.immediateFuture(new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected error trying to authenticate")
.withCause(e)));
}
}
} | #vulnerable code
HeaderCacheElement syncRefresh() {
try (Closeable ss = Tracing.getTracer().spanBuilder("CredentialsRefresh").startScopedSpan()) {
return asyncRefresh().get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOG.warn("Interrupted while trying to refresh google credentials.", e);
Thread.currentThread().interrupt();
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Authentication was interrupted.")
.withCause(e)
);
} catch (ExecutionException e) {
LOG.warn("ExecutionException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("ExecutionException during Authentication.")
.withCause(e)
);
} catch (TimeoutException e) {
LOG.warn("TimeoutException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("TimeoutException during Authentication.")
.withCause(e)
);
} catch (Exception e) {
LOG.warn("Unexpected execption while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected execption during Authentication.")
.withCause(e)
);
}
}
#location 3
#vulnerability type INTERFACE_NOT_THREAD_SAFE
#fixed code
public static Credentials getCredentials(CredentialOptions options)
throws IOException, GeneralSecurityException {
return patchCredentials(getCredentialsInner(options));
} | #vulnerable code
public static Credentials getCredentials(CredentialOptions options)
throws IOException, GeneralSecurityException {
switch (options.getCredentialType()) {
case DefaultCredentials:
return getApplicationDefaultCredential();
case P12:
P12CredentialOptions p12Options = (P12CredentialOptions) options;
return getCredentialFromPrivateKeyServiceAccount(
p12Options.getServiceAccount(), p12Options.getKeyFile());
case SuppliedCredentials:
return ((UserSuppliedCredentialOptions) options).getCredential();
case SuppliedJson:
JsonCredentialsOptions jsonCredentialsOptions = (JsonCredentialsOptions) options;
synchronized (jsonCredentialsOptions) {
if (jsonCredentialsOptions.getCachedCredentials() == null) {
jsonCredentialsOptions.setCachedCredentails(
getInputStreamCredential(jsonCredentialsOptions.getInputStream()));
}
return jsonCredentialsOptions.getCachedCredentials();
}
case None:
return null;
default:
throw new IllegalStateException(
"Cannot process Credential type: " + options.getCredentialType());
}
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
protected void run() {
try (Scope scope = TRACER.withSpan(operationSpan)) {
rpcTimerContext = rpc.getRpcMetrics().timeRpc();
operationSpan.addAnnotation(Annotation.fromDescriptionAndAttributes("rpcStart",
ImmutableMap.of("attempt", AttributeValue.longAttributeValue(failedCount))));
Metadata metadata = new Metadata();
metadata.merge(originalMetadata);
callWrapper.setCallAndStart(rpc, getRpcCallOptions(), getRetryRequest(), this, metadata);
} catch (Exception e) {
setException(e);
}
} | #vulnerable code
protected void run() {
try (Scope scope = TRACER.withSpan(operationSpan)) {
rpcTimerContext = rpc.getRpcMetrics().timeRpc();
operationSpan.addAnnotation(Annotation.fromDescriptionAndAttributes("rpcStart",
ImmutableMap.of("attempt", AttributeValue.longAttributeValue(failedCount))));
Metadata metadata = new Metadata();
metadata.merge(originalMetadata);
synchronized (callLock) {
// There's a subtle race condition in RetryingStreamOperation which requires a separate
// newCall/start split. The call variable needs to be set before onMessage() happens; that
// usually will occur, but some unit tests broke with a merged newCall and start.
call = rpc.newCall(getRpcCallOptions());
rpc.start(getRetryRequest(), this, metadata, call);
}
} catch (Exception e) {
setException(e);
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
Future<HeaderCacheElement> asyncRefresh() {
LOG.trace("asyncRefresh");
synchronized (lock) {
try {
if (futureToken != null) {
return futureToken;
}
if (headerCache.getCacheState() == CacheState.Good) {
return Futures.immediateFuture(headerCache);
}
Future<HeaderCacheElement> future = executor.submit(new Callable<HeaderCacheElement>() {
@Override
public HeaderCacheElement call() throws Exception {
return updateToken();
}
});
if (!future.isDone()) {
this.futureToken = future;
}
return future;
} catch (RuntimeException e) {
futureToken = null;
LOG.warn("Got an unexpected exception while trying to refresh google credentials.", e);
return Futures.immediateFuture(new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected error trying to authenticate")
.withCause(e)));
}
}
} | #vulnerable code
HeaderCacheElement syncRefresh() {
try (Closeable ss = Tracing.getTracer().spanBuilder("CredentialsRefresh").startScopedSpan()) {
return asyncRefresh().get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOG.warn("Interrupted while trying to refresh google credentials.", e);
Thread.currentThread().interrupt();
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Authentication was interrupted.")
.withCause(e)
);
} catch (ExecutionException e) {
LOG.warn("ExecutionException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("ExecutionException during Authentication.")
.withCause(e)
);
} catch (TimeoutException e) {
LOG.warn("TimeoutException while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("TimeoutException during Authentication.")
.withCause(e)
);
} catch (Exception e) {
LOG.warn("Unexpected execption while trying to refresh google credentials.", e);
return new HeaderCacheElement(
Status.UNAUTHENTICATED
.withDescription("Unexpected execption during Authentication.")
.withCause(e)
);
}
}
#location 4
#vulnerability type INTERFACE_NOT_THREAD_SAFE
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 43
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
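In the fixed test the assertions call underTest.isRefreshing() instead of reading the isRefreshing field (or taking underTest.lock) directly, so the interceptor stays the only code that touches its own monitor. A hypothetical sketch of that accessor shape, with invented names rather than the real interceptor's fields:

// A guarded flag exposed through a synchronized accessor instead of a public field.
class RefreshState {
  private final Object lock = new Object();
  private boolean refreshing; // guarded by lock

  void markRefreshing(boolean value) {
    synchronized (lock) {
      refreshing = value;
    }
  }

  // Callers (including tests) never touch the field or the monitor directly.
  boolean isRefreshing() {
    synchronized (lock) {
      return refreshing;
    }
  }
}

A test can then assert on isRefreshing() without knowing which lock guards the flag, and the locking policy can change without breaking callers.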
#fixed code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
} | #vulnerable code
@SuppressWarnings("unchecked")
@Override
public void run() {
try {
// restart the clock.
this.rowMerger = new RowMerger(rowObserver);
adapter = new CallToStreamObserverAdapter();
synchronized (callLock) {
super.run();
// pre-fetch one more result, for performance reasons.
adapter.request(1);
if (rowObserver instanceof ClientResponseObserver) {
((ClientResponseObserver<ReadRowsRequest, FlatRow>) rowObserver).beforeStart(adapter);
}
lastResponseMs = clock.currentTimeMillis();
}
} catch (Exception e) {
setException(e);
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
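The fixed run() no longer constructs this.rowMerger or adapter inside the method, so a restarted call does not republish those fields from the retry thread. A small sketch of the construct-first, then-hand-to-the-thread shape that avoids this kind of mid-flight publication, with made-up names:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Collaborators are wired up before the task is handed to another thread,
// so run() only reads state that was safely published by the constructor.
class StreamingTask implements Runnable {
  interface ResponseAdapter {
    void request(int count);
  }

  private final ResponseAdapter adapter; // final: published before the thread starts

  StreamingTask(ResponseAdapter adapter) {
    this.adapter = adapter;
  }

  @Override
  public void run() {
    adapter.request(1); // pre-fetch one result, mirroring the pattern above
  }

  public static void main(String[] args) {
    ExecutorService pool = Executors.newSingleThreadExecutor();
    pool.submit(new StreamingTask(count -> System.out.println("requested " + count)));
    pool.shutdown();
  }
}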
#fixed code
public IBigtableDataClient getClientWrapper() {
if (options.useGCJClient()) {
return dataGCJClient;
} else {
return new BigtableDataClientWrapper(dataClient, getDataRequestContext());
}
} | #vulnerable code
public IBigtableDataClient getClientWrapper() {
if (options.useGCJClient()) {
if (this.dataGCJClient == null) {
synchronized (BigtableSession.this) {
try {
if (dataGCJClient == null) {
BigtableDataSettings dataSettings =
BigtableVeneerSettingsFactory.createBigtableDataSettings(options);
this.dataGCJClient = new BigtableDataGCJClient(
com.google.cloud.bigtable.data.v2.BigtableDataClient.create(dataSettings));
}
} catch (IOException ioException) {
throw new RuntimeException(ioException);
}
}
}
return dataGCJClient;
} else {
return new BigtableDataClientWrapper(dataClient, getDataRequestContext());
}
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
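The vulnerable getClientWrapper() lazily builds dataGCJClient behind a double-checked lock, a pattern that is only safe when the checked field is volatile, while the fixed version simply returns a field that was initialized beforehand. If lazy construction is still wanted, the initialization-on-demand holder idiom below gives one-time, thread-safe construction without the hand-rolled check; the types here are illustrative, not the session's real clients.

// One-time, thread-safe lazy construction via the initialization-on-demand holder idiom:
// the JVM's class initialization supplies the happens-before edge, no locking code needed.
class ClientHolder {
  private ClientHolder() {}

  static class ExpensiveClient {
    ExpensiveClient() {
      // stand-in for building a real data client
    }
  }

  private static class Holder {
    static final ExpensiveClient INSTANCE = new ExpensiveClient();
  }

  static ExpensiveClient get() {
    return Holder.INSTANCE; // constructed exactly once, on first use
  }
}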
#fixed code
@Override
public void onClose(Status status, Metadata trailers) {
try (NonThrowingCloseable s = TRACER.withSpan(operationSpan)) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
finalizeStats(status);
}
} else {
onError(status, trailers);
}
}
} | #vulnerable code
@Override
public void onClose(Status status, Metadata trailers) {
synchronized (callLock) {
call = null;
}
rpcTimerContext.close();
// OK
if (status.isOk()) {
if (onOK(trailers)) {
operationTimerContext.close();
}
} else {
onError(status, trailers);
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
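Both versions of onClose() clear the shared call reference inside synchronized (callLock); the fixed one additionally scopes the tracing span with try-with-resources and funnels the success path through finalizeStats(status). A generic sketch of keeping every read and write of such a handle behind a single monitor (the class below is invented, not the retrying-call code):

import java.util.function.Consumer;

// Every read and write of the mutable handle goes through the same monitor,
// so a concurrent cancel can never observe a half-cleared reference.
class CallHandle<T> {
  private final Object callLock = new Object();
  private T call; // guarded by callLock

  void set(T value) {
    synchronized (callLock) {
      call = value;
    }
  }

  void clear() {
    synchronized (callLock) {
      call = null;
    }
  }

  boolean cancelIfPresent(Consumer<T> canceller) {
    synchronized (callLock) {
      if (call == null) {
        return false;
      }
      canceller.accept(call);
      return true;
    }
  }
}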
#fixed code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
} | #vulnerable code
@Test
public void testRefreshAfterFailure() throws Exception {
underTest = new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
final AccessToken accessToken = new AccessToken("hi", new Date(HeaderCacheElement.TOKEN_STALENESS_MS + 1));
//noinspection unchecked
Mockito.when(credentials.refreshAccessToken())
// First call will throw Exception & bypass retries
.thenThrow(new IOException())
// Second call will succeed
.thenReturn(accessToken);
// First call
HeaderCacheElement firstResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Exception, firstResult.getCacheState());
// Now the second token should be available
HeaderCacheElement secondResult = underTest.getHeaderSafe();
Assert.assertEquals(CacheState.Good, secondResult.getCacheState());
Assert.assertThat(secondResult.header, containsString("hi"));
// Make sure that the token was only requested twice: once for the first failure & second time for background recovery
Mockito.verify(credentials, times(2)).refreshAccessToken();
}
#location 20
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
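The test leans on Mockito's consecutive stubbing so that the first refreshAccessToken() call throws and the next one returns a usable token. A stripped-down sketch of that stubbing shape, using a plain Callable stand-in rather than the real credentials object:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.concurrent.Callable;

// Consecutive stubbing: the first call fails, the second succeeds, which is how the
// test above simulates a transient outage followed by a successful token refresh.
class TransientFailureStubSketch {
  @SuppressWarnings("unchecked")
  static void demo() throws Exception {
    Callable<String> loader = mock(Callable.class);
    when(loader.call())
        .thenThrow(new IOException("first attempt fails"))
        .thenReturn("token");

    try {
      loader.call(); // first call: IOException
    } catch (IOException expected) {
      // the component under test would treat this as a failed refresh
    }
    String second = loader.call(); // second call: "token"
    assert "token".equals(second);
    verify(loader, times(2)).call();
  }
}

The same chain extends naturally (thenThrow(...).thenThrow(...).thenReturn(...)) when a scenario needs more than one failed attempt before recovery.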
#fixed code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
Assert.assertFalse(underTest.isRefreshing());
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing());
} | #vulnerable code
@Test
/*
* Test that checks that concurrent requests to RefreshingOAuth2CredentialsInterceptor refresh
* logic doesn't cause hanging behavior. Specifically, when an Expired condition occurs it
* triggers a call to syncRefresh() which potentially waits for refresh that was initiated
* from another thread either through syncRefresh() or asyncRefresh(). This test case simulates
* that condition.
*/
public void testRefreshDoesntHang() throws Exception {
// Assume that the user starts at this time... it's an arbitrarily big number which will
// assure that subtracting HeaderCacheElement.TOKEN_STALENESS_MS and TOKEN_EXPIRES_MS will not
// be negative.
long start = HeaderCacheElement.TOKEN_STALENESS_MS * 10;
setTimeInMillieconds(start);
// RefreshingOAuth2CredentialsInterceptor will show that the access token is stale.
final long expiration = start + HeaderCacheElement.TOKEN_EXPIRES_MS + 1;
// Create a mechanism that will allow us to control when the accessToken is returned.
// credentials.refreshAccessToken() will get called asynchronously and will wait until the
// lock is notified before returning. That will allow us to set up multiple concurrent calls
final Object lock = new Object();
Mockito.when(credentials.refreshAccessToken()).thenAnswer(new Answer<AccessToken>() {
@Override
public AccessToken answer(InvocationOnMock invocation) throws Throwable {
synchronized (lock) {
lock.wait();
}
return new AccessToken("", new Date(expiration));
}
});
// Force a synchronous refresh. This ought to wait until a refresh happening in another thread
// completes.
Callable<Void> syncRefreshCallable = new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.syncRefresh();
return null;
}
};
underTest =
new RefreshingOAuth2CredentialsInterceptor(executorService, credentials);
underTest.rateLimiter.setRate(100000);
// At this point, the access token wasn't retrieved yet. The
// RefreshingOAuth2CredentialsInterceptor considers null to be Expired.
Assert.assertEquals(CacheState.Expired, underTest.headerCache.getCacheState());
syncCall(lock, syncRefreshCallable);
// Check to make sure that the AccessToken was retrieved.
Assert.assertEquals(CacheState.Stale, underTest.headerCache.getCacheState());
// Check to make sure we're no longer refreshing.
synchronized (underTest.lock) {
Assert.assertFalse(underTest.isRefreshing);
}
// Kick off a couple of asynchronous refreshes. Kicking off more than one shouldn't be
// necessary, but also should not be harmful, since there are likely to be multiple concurrent
// requests that call asyncRefresh() when the token turns stale.
underTest.asyncRefresh();
underTest.asyncRefresh();
underTest.asyncRefresh();
syncCall(lock, syncRefreshCallable);
Assert.assertFalse(underTest.isRefreshing);
}
#location 54
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if (xlearningAppType.equals("LIGHTLDA")) {
if (this.role.equals(XLearningConstants.PS)) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightLDALocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort;
this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort;
LOG.info("lightLDA ip port string is: " + ipPortStr);
amClient.reportLightLDAIpPort(containerId, ipPortStr);
}
if (this.role.equals(XLearningConstants.WORKER)) {
String lightLDAIpPortStr;
while (true) {
lightLDAIpPortStr = amClient.getLightLDAIpPortStr();
if (lightLDAIpPortStr != null) {
LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type);
PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!single) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
} else if (xlearningAppType.equals("LIGHTLDA")) {
envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString()));
envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString()));
envList.add("LIGHTLDA_RANK=" + this.index);
envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint);
envList.add("LIGHTLDA_ROLE=" + this.role);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
} | #vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 196
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
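The RESOURCE_LEAK label on this pair flags a stream that can escape its close() call: in the redirect threads the readers and writers are closed only at the end of the try block, so an exception mid-copy leaves the handle open. Which exact handle the analyzer means is indicated by #location 196; the general cure is the same, shown here as a minimal sketch of the redirect step written with try-with-resources, using invented names rather than the container's actual helpers.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;

// Redirecting a child process stream with try-with-resources: the reader and writer
// are closed even if the copy loop exits early with an exception.
class StreamRedirectSketch {
  static void redirect(InputStream processOut, String targetFile) {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(processOut));
        PrintWriter writer = new PrintWriter(targetFile, "UTF-8")) {
      String line;
      while ((line = reader.readLine()) != null) {
        writer.println(line);
      }
    } catch (IOException e) {
      e.printStackTrace(); // streams are already closed by the time we get here
    }
  }
}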
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (modelpb != null && !modelpb.equals("")) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As the role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
} | #vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may need to encode the value with Base64 when it is passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may need to encode the value with Base64 when it is passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* the Python script can load the cluster def with "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (modelpb != null && !modelpb.equals("")) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As the role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 399
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
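For orientation, a RESOURCE_LEAK label like the one above usually means a stream, writer, or socket that is not closed on every exit path (for example, a writer whose close() is skipped when an earlier call throws). The following is a minimal, hypothetical Java sketch of that pattern and of the try-with-resources form that avoids it; the class, method, and file names are illustrative only and are not taken from the XLearning sources.

import java.io.PrintWriter;

class ResourceLeakSketch {
  // Leak-prone: if println throws, close() is never reached and the file handle leaks.
  static void leaky(String data) throws Exception {
    PrintWriter writer = new PrintWriter("list.txt", "UTF-8");
    writer.println(data);
    writer.close();
  }

  // Leak-free: try-with-resources closes the writer on every exit path, normal or exceptional.
  static void safe(String data) throws Exception {
    try (PrintWriter writer = new PrintWriter("list.txt", "UTF-8")) {
      writer.println(data);
    }
  }
}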
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may need to encode the value with Base64 when it is passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may need to encode the value with Base64 when it is passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* the Python script can load the cluster def with "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (modelpb != null && !modelpb.equals("")) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As the role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
} | #vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may need to encode the value with Base64 when it is passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may need to encode the value with Base64 when it is passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* the Python script can load the cluster def with "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
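// Poll the launched process until it exits or the AM reports the training job as
// completed; exitValue() throws IllegalThreadStateException while the process runs.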
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 399
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
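A RESOURCE_LEAK finding of this kind typically points at process-stream readers that the redirect threads open but never close once readLine() throws. As a minimal hedged sketch (the Process and Log parameters are placeholders, not identifiers from the patch), the try-with-resources form closes the reader on every exit path:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import org.apache.commons.logging.Log;

static void redirectStdout(final Process process, final Log log) {
  Thread redirectThread = new Thread(new Runnable() {
    @Override
    public void run() {
      // try-with-resources closes the reader whether the loop finishes or throws
      try (BufferedReader reader =
               new BufferedReader(new InputStreamReader(process.getInputStream()))) {
        String line;
        while ((line = reader.readLine()) != null) {
          log.info(line);
        }
      } catch (IOException e) {
        log.warn("Exception while redirecting process output", e);
      }
    }
  });
  redirectThread.setDaemon(true);
  redirectThread.start();
}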
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
* python script can load the cluster def using "json.loads(os.environ["CLUSTER_DEF"])"
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()),
"LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort,
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
} catch (EOFException e) {
finished = true;
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
}
osw.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start tensorboard process
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand = "tensorboard --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing tensorborad command:" + boardCommand);
boardReservedSocket.close();
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of tensorboard process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of tensorboard process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report tensorboard url:" + boardUrl);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
} | #vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
* python script can load the cluster def using "json.loads(os.environ["CLUSTER_DEF"])"
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
} catch (EOFException e) {
finished = true;
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
}
osw.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start tensorboard process
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand = "tensorboard --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing tensorborad command:" + boardCommand);
boardReservedSocket.close();
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of tensorboard process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of tensorboard process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report tensorboard url:" + boardUrl);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 186
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
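The same leak class also covers writers and reserved sockets that are closed only on the success path. A minimal hedged sketch of the finally-based variant (the file name and endpoint map are placeholders, not taken from the patch below):

import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.Map;

static void writeEndpoints(Map<String, String> endpoints) throws IOException {
  Socket reserved = new Socket();
  PrintWriter writer = null;
  try {
    reserved.bind(new InetSocketAddress("127.0.0.1", 0));
    writer = new PrintWriter("endpoints.txt", "UTF-8");
    for (String key : endpoints.keySet()) {
      writer.println(endpoints.get(key));
    }
  } finally {
    // released on both the success and the failure path
    if (writer != null) {
      writer.close();
    }
    reserved.close();
  }
}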
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if (xlearningAppType.equals("LIGHTLDA")) {
if (this.role.equals(XLearningConstants.PS)) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightLDALocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort;
this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort;
LOG.info("lightLDA ip port string is: " + ipPortStr);
amClient.reportLightLDAIpPort(containerId, ipPortStr);
}
if (this.role.equals(XLearningConstants.WORKER)) {
String lightLDAIpPortStr;
while (true) {
lightLDAIpPortStr = amClient.getLightLDAIpPortStr();
if (lightLDAIpPortStr != null) {
LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type);
PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO: may need to encode with Base64 when used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO: may need to encode with Base64 when used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
* python script can load cluster def use "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!single) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
} else if (xlearningAppType.equals("LIGHTLDA")) {
envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString()));
envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString()));
envList.add("LIGHTLDA_RANK=" + this.index);
envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint);
envList.add("LIGHTLDA_ROLE=" + this.role);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
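// Poll the training process until it exits or the AM signals that training has completed;
// exitValue() throws IllegalThreadStateException while the process is still running.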
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As the role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
} | #vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
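// For distributed TensorFlow, report the reserved port to the AM and wait until the complete
// cluster definition, assembled from all containers, becomes available.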
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO: may need to encode with Base64 when passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO: may need to encode with Base64 when passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
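// Assemble the child process environment: pass through the required Hadoop/Java settings plus
// the framework-specific variables for the configured application type.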
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* Set TF_CLUSTER_DEF in the environment.
* A Python script can load the cluster def with "json.loads(os.environ["CLUSTER_DEF"])".
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
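// Stream the input splits to the training process over stdin for the configured number of
// epochs; records read in the first epoch may also be written to a local gzip cache, and the
// loop stops after the first epoch when that cache is kept.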
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As the role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 196
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO: may need to encode with Base64 when passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO: may need to encode with Base64 when passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* Set TF_CLUSTER_DEF in the environment.
* A Python script can load the cluster def with "json.loads(os.environ["CLUSTER_DEF"])".
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
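// Choose how to handle the training process stdout: with the STREAM output strategy it is
// written to HDFS through the configured OutputFormat, otherwise it is simply logged.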
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As the role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
} | #vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO: may need to encode with Base64 when passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO: may need to encode with Base64 when passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* Set TF_CLUSTER_DEF in the environment.
* A Python script can load the cluster def with "json.loads(os.environ["CLUSTER_DEF"])".
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
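// Poll the training process until it exits or the AM signals that training has completed;
// exitValue() throws IllegalThreadStateException while the process is still running.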
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As the role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 392
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
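// LightLDA setup: a PS container reports its reserved endpoint to the AM, while a worker waits for the complete endpoint list and writes it to lightLDAEndPoints.txt.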
if (xlearningAppType.equals("LIGHTLDA")) {
if (this.role.equals(XLearningConstants.PS)) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightLDALocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = this.index + " " + address.getHostAddress() + ":" + this.lightLDALocalPort;
this.lightLDAEndpoint = address.getHostAddress() + ":" + this.lightLDALocalPort;
LOG.info("lightLDA ip port string is: " + ipPortStr);
amClient.reportLightLDAIpPort(containerId, ipPortStr);
}
if (this.role.equals(XLearningConstants.WORKER)) {
String lightLDAIpPortStr;
while (true) {
lightLDAIpPortStr = amClient.getLightLDAIpPortStr();
if (lightLDAIpPortStr != null) {
LOG.info("lightLDA IP PORT list is: " + lightLDAIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightLDAIpPortStr, type);
PrintWriter writer = new PrintWriter("lightLDAEndPoints.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
}
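// Distributed TensorFlow setup: report the reserved port to the AM, then wait until the AM returns the assembled cluster definition.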
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
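// Distributed LightGBM setup: report this container's host and port, then wait for the full machine list and write it to lightGBMlist.txt.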
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
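// Assemble the environment for the training process: common tool paths plus framework-specific variables depending on the application type.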
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* Set TF_CLUSTER_DEF in the environment so that the Python script can load
* the cluster definition with json.loads(os.environ["CLUSTER_DEF"]).
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!single) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
} else if (xlearningAppType.equals("LIGHTLDA")) {
envList.add("LIGHTLDA_WORKER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_WORKER_NUM.toString()));
envList.add("LIGHTLDA_SERVER_NUM=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTLDA_PS_NUM.toString()));
envList.add("LIGHTLDA_RANK=" + this.index);
envList.add("LIGHTLDA_SERVER_ENDPOINT=" + this.lightLDAEndpoint);
envList.add("LIGHTLDA_ROLE=" + this.role);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
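// STREAM input strategy: a thread feeds the assigned input splits to the training process via stdin, optionally caching the first epoch to a local gzip file.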
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
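// STREAM output strategy: pipe the training process stdout into an HDFS RecordWriter under the job's output directory; otherwise stdout is simply logged.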
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb == null || modelpb.equals(""))) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
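// Poll the training process for an exit code until it terminates or the AM signals that training has completed.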
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
// As the role is worker, upload output files only when the process exited successfully.
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* Set TF_CLUSTER_DEF in the environment so that the Python script can load
* the cluster definition with json.loads(os.environ["CLUSTER_DEF"]).
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb == null || modelpb.equals(""))) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
// As the role is worker, upload output files only when the process exited successfully.
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 144
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
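// Distributed TensorFlow setup: report the reserved port, then wait for the AM to return the assembled cluster definition.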
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
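// Assemble the environment for the training process: common tool paths plus framework-specific variables for the application type.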
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* Set TF_CLUSTER_DEF in the environment so that the Python script can load
* the cluster definition with json.loads(os.environ["CLUSTER_DEF"]).
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
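// STREAM input strategy: a thread feeds the assigned input splits to the training process via stdin, optionally caching the first epoch to a local gzip file.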
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
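// STREAM output strategy: pipe the training process stdout into an HDFS RecordWriter; otherwise stdout is simply logged.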
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb == null || modelpb.equals(""))) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
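// Poll the training process for an exit code until it terminates or the AM signals that training has completed.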
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
// As the role is worker, upload output files only when the process exited successfully.
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
// TODO: may need to Base64-encode this value when it is passed through the environment
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
List<String> envList = new ArrayList<>(20);
envList.add("PATH=" + System.getenv("PATH"));
envList.add("JAVA_HOME=" + System.getenv("JAVA_HOME"));
envList.add("HADOOP_HOME=" + System.getenv("HADOOP_HOME"));
envList.add("HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"));
envList.add("LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native");
envList.add("CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"));
envList.add("PYTHONUNBUFFERED=1");
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if ("TENSORFLOW".equals(xlearningAppType)) {
envList.add(XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index);
envList.add(XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role);
if (!single) {
/**
* set TF_CLUSTER_DEF in env
         * python scripts can load the cluster def via "json.loads(os.environ["CLUSTER_DEF"])"
*/
envList.add(XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef);
}
} else if (xlearningAppType.equals("MXNET")) {
if (!singleMx) {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
envList.add("DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"));
envList.add("DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"));
envList.add(dmlcID + "=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
}
} else if (xlearningAppType.equals("DISTXGBOOST")) {
envList.add("DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"));
envList.add("DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"));
envList.add("DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"));
envList.add("DMLC_TASK_ID=" + this.index);
envList.add("DMLC_ROLE=" + this.role);
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
envList.add("LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()));
envList.add("LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort);
}
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("PLACEHOLDER")) {
envList.add(XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList);
if (envList.toString().length() > conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH)) {
LOG.warn("Current container environments length " + envList.toString().length() + " exceed the configuration " + XLearningConfiguration.XLEARNING_ENV_MAXLENGTH + " " + conf.getInt(XLearningConfiguration.XLEARNING_ENV_MAXLENGTH, XLearningConfiguration.DEFAULT_XLEARNING_ENV_MAXLENGTH));
envList.remove(envList.size() - 1);
LOG.warn("InputFile list had written to local file: inputFileList.txt !!");
PrintWriter writer = new PrintWriter("inputFileList.txt", "UTF-8");
writer.println(this.inputFileList);
writer.close();
}
}
String[] env = envList.toArray(new String[envList.size()]);
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).toUpperCase().equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if (j == 0 && isCache) {
if (conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if ((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if (isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS) && this.xlearningAppType.equals("TENSORFLOW")) {
if (code == -1 || code == 0) {
this.uploadOutputFiles();
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 403
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
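The pair above reports a resource leak inside run(); as an illustrative sketch only (class, method, and file names below are invented, not taken from XLearning), the gzip cache written by the stdin-redirect thread can be scoped with try-with-resources so every stream is closed even when the copy loop throws:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.zip.GZIPOutputStream;

class GzipCacheSketch {
  static void writeLines(String cachePath, List<String> lines) throws IOException {
    // Each resource is closed automatically, in reverse order, on success or failure.
    try (FileOutputStream fos = new FileOutputStream(cachePath);
         GZIPOutputStream gos = new GZIPOutputStream(fos);
         OutputStreamWriter osw = new OutputStreamWriter(gos, StandardCharsets.UTF_8)) {
      for (String line : lines) {
        osw.write(line);
        osw.write("\n");
      }
    }
  }
}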
#fixed code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
           * python scripts can load the cluster def via "json.loads(os.environ["CLUSTER_DEF"])"
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()),
"LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort,
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
File gzFile = new File(conf.get(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHEFILE_NAME, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHEFILE_NAME));
GZIPOutputStream gos = new GZIPOutputStream(new FileOutputStream(gzFile));
boolean isCache = conf.getBoolean(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHE, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHE);
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
if(j == 0 && isCache) {
if(conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH) > 1) {
gos.write(value.toString().getBytes());
gos.write("\n".getBytes());
if((gzFile.length() / 1024 / 1024) > conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT)) {
LOG.info("Inputformat cache file size is:" + gzFile.length() / 1024 / 1024 + "M "
+ "beyond the limit size:" + conf.getInt(XLearningConfiguration.XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT, XLearningConfiguration.DEFAULT_XLEARNING_INPUTFORMAT_CACHESIZE_LIMIT) + "M.");
gzFile.delete();
LOG.info("Local cache file deleted and will not use cache.");
isCache = false;
}
}
}
} catch (EOFException e) {
finished = true;
e.printStackTrace();
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
if(isCache) {
break;
}
}
osw.close();
gos.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing board command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
  }
#vulnerable code
private Boolean run() throws IOException {
try {
if (this.role.equals(XLearningConstants.WORKER)) {
prepareInputFiles();
}
if (this.conf.getBoolean(XLearningConfiguration.XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_AUTO_CREATE_OUTPUT_DIR)) {
createLocalOutputDir();
}
} catch (InterruptedException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
} catch (ExecutionException e) {
LOG.error("Container prepare inputs failed!", e);
this.reportFailedAndExit();
}
if ("TENSORFLOW".equals(xlearningAppType) && !single) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
amClient.reportReservedPort(envs.get(ApplicationConstants.Environment.NM_HOST.toString()),
reservedSocket.getLocalPort(), this.role, this.index);
while (true) {
//TODO may be need encode use Base64 while used in Env
this.clusterDef = amClient.getClusterDef();
if (this.clusterDef != null) {
LOG.info("Cluster def is: " + this.clusterDef);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
}
if (xlearningAppType.equals("DISTLIGHTGBM")) {
LOG.info("Reserved available port: " + reservedSocket.getLocalPort());
this.lightGBMLocalPort = reservedSocket.getLocalPort();
InetAddress address = null;
try {
address = InetAddress.getByName(envs.get(ApplicationConstants.Environment.NM_HOST.toString()));
} catch (UnknownHostException e) {
LOG.info("acquire host ip failed " + e);
reportFailedAndExit();
}
String ipPortStr = address.getHostAddress() + " " + reservedSocket.getLocalPort();
LOG.info("lightGBM ip port string is: " + ipPortStr);
amClient.reportLightGbmIpPort(containerId, ipPortStr);
String lightGBMIpPortStr;
while (true) {
//TODO may be need encode use Base64 while used in Env
lightGBMIpPortStr = amClient.getLightGbmIpPortStr();
if (lightGBMIpPortStr != null) {
LOG.info("lightGBM IP PORT list is: " + lightGBMIpPortStr);
break;
}
Utilities.sleep(this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL));
}
Type type = new TypeToken<ConcurrentHashMap<String, String>>() {
}.getType();
ConcurrentHashMap<String, String> map = new Gson().fromJson(lightGBMIpPortStr, type);
PrintWriter writer = new PrintWriter("lightGBMlist.txt", "UTF-8");
for (String str : map.keySet()) {
writer.println(map.get(str));
}
writer.close();
}
String[] env = null;
if ("TENSORFLOW".equals(xlearningAppType)) {
if (single) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
/**
* set TF_CLUSTER_DEF in env
           * python scripts can load the cluster def via "json.loads(os.environ["CLUSTER_DEF"])"
*/
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_TF_CLUSTER_DEF.toString() + "=" + this.clusterDef,
XLearningConstants.Environment.XLEARNING_TF_INDEX.toString() + "=" + this.index,
XLearningConstants.Environment.XLEARNING_TF_ROLE.toString() + "=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else if (xlearningAppType.equals("MXNET")) {
if (singleMx) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTXGBOOST")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_TRACKER_URI=" + System.getenv("DMLC_TRACKER_URI"),
"DMLC_TRACKER_PORT=" + System.getenv("DMLC_TRACKER_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"PYTHONUNBUFFERED=1",
"DMLC_TASK_ID=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else if (xlearningAppType.equals("DISTLIGHTGBM")) {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"LIGHTGBM_NUM_MACHINE=" + System.getenv(XLearningConstants.Environment.XLEARNING_LIGHTGBM_WORKER_NUM.toString()),
"LIGHTGBM_LOCAL_LISTEN_PORT=" + this.lightGBMLocalPort,
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
} else {
String dmlcID;
if (this.role.equals("worker")) {
dmlcID = "DMLC_WORKER_ID";
} else {
dmlcID = "DMLC_SERVER_ID";
}
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"DMLC_PS_ROOT_URI=" + System.getenv("DMLC_PS_ROOT_URI"),
"DMLC_PS_ROOT_PORT=" + System.getenv("DMLC_PS_ROOT_PORT"),
"DMLC_NUM_WORKER=" + System.getenv("DMLC_NUM_WORKER"),
"DMLC_NUM_SERVER=" + System.getenv("DMLC_NUM_SERVER"),
"PYTHONUNBUFFERED=1",
dmlcID + "=" + this.index,
"DMLC_ROLE=" + this.role,
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
} else {
env = new String[]{
"PATH=" + System.getenv("PATH"),
"JAVA_HOME=" + System.getenv("JAVA_HOME"),
"HADOOP_HOME=" + System.getenv("HADOOP_HOME"),
"HADOOP_HDFS_HOME=" + System.getenv("HADOOP_HDFS_HOME"),
"LD_LIBRARY_PATH=" + "./:" + System.getenv("LD_LIBRARY_PATH") + ":" + System.getenv("JAVA_HOME") +
"/jre/lib/amd64/server:" + System.getenv("HADOOP_HOME") + "/lib/native",
"CLASSPATH=" + "./:" + System.getenv("CLASSPATH") + ":" + System.getProperty("java.class.path"),
"PYTHONUNBUFFERED=1",
XLearningConstants.Environment.XLEARNING_INPUT_FILE_LIST.toString() + "=" + this.inputFileList
};
}
String command = envs.get(XLearningConstants.Environment.XLEARNING_EXEC_CMD.toString());
LOG.info("Executing command:" + command);
Runtime rt = Runtime.getRuntime();
//close reserved socket as tf will bind this port later
this.reservedSocket.close();
final Process xlearningProcess = rt.exec(command, env);
Date now = new Date();
heartbeatThread.setContainersStartTime(now.toString());
if (conf.get(XLearningConfiguration.XLEARNING_INPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_INPUT_STRATEGY).equals("STREAM")) {
LOG.info("Starting thread to redirect stdin of xlearning process");
Thread stdinRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
OutputStreamWriter osw = new OutputStreamWriter(xlearningProcess.getOutputStream());
List<InputSplit> inputs = Arrays.asList(amClient.getStreamInputSplit(containerId));
JobConf jobConf = new JobConf(conf);
RecordReader reader;
InputFormat inputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_INPUTF0RMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_INPUTF0RMAT_CLASS, InputFormat.class),
jobConf);
for (int j = 0; j < conf.getInt(XLearningConfiguration.XLEARNING_STREAM_EPOCH, XLearningConfiguration.DEFAULT_XLEARNING_STREAM_EPOCH); j++) {
LOG.info("Epoch " + (j + 1) + " starting...");
for (int i = 0, len = inputs.size(); i < len; i++) {
LOG.info("split " + (i + 1) + " is handling...");
reader = inputFormat.getRecordReader(inputs.get(i), jobConf, Reporter.NULL);
Object key = reader.createKey();
Object value = reader.createValue();
Boolean finished = false;
while (!finished) {
try {
finished = !reader.next(key, value);
if (finished) {
break;
}
osw.write(value.toString());
osw.write("\n");
} catch (EOFException e) {
finished = true;
}
}
reader.close();
LOG.info("split " + (i + 1) + " is finished.");
}
LOG.info("Epoch " + (j + 1) + " finished.");
}
osw.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdinRedirectThread");
e.printStackTrace();
}
}
});
stdinRedirectThread.start();
}
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
if ((this.conf.get(XLearningConfiguration.XLEARNING_OUTPUT_STRATEGY, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUT_STRATEGY).equals("STREAM")) && outputs.size() > 0) {
LOG.info("Starting thread to redirect stream stdout of xlearning process");
final Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
List<OutputInfo> outputs = Arrays.asList(amClient.getOutputLocation());
JobConf jobConf = new JobConf(conf);
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(Text.class);
jobConf.setBoolean("mapred.output.compress", true);
jobConf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
jobConf.setOutputFormat(TextMultiOutputFormat.class);
Path remotePath = new Path(outputs.get(0).getDfsLocation() + "/_temporary/" + containerId.toString());
FileSystem dfs = remotePath.getFileSystem(jobConf);
jobConf.set(XLearningConstants.STREAM_OUTPUT_DIR, remotePath.makeQualified(dfs).toString());
OutputFormat outputFormat = ReflectionUtils.newInstance(conf.getClass(XLearningConfiguration.XLEARNING_OUTPUTFORMAT_CLASS, XLearningConfiguration.DEFAULT_XLEARNING_OUTPUTF0RMAT_CLASS, OutputFormat.class),
jobConf);
outputFormat.checkOutputSpecs(dfs, jobConf);
JobID jobID = new JobID(new SimpleDateFormat("yyyyMMddHHmm").format(new Date()), 0);
TaskAttemptID taId = new TaskAttemptID(new TaskID(jobID, true, 0), 0);
jobConf.set("mapred.tip.id", taId.getTaskID().toString());
jobConf.set("mapred.task.id", taId.toString());
jobConf.set("mapred.job.id", jobID.toString());
amClient.reportMapedTaskID(containerId, taId.toString());
RecordWriter writer = outputFormat.getRecordWriter(dfs, jobConf, "part-r", Reporter.NULL);
String xlearningStreamResultLine;
while ((xlearningStreamResultLine = reader.readLine()) != null) {
writer.write(null, xlearningStreamResultLine);
}
writer.close(Reporter.NULL);
reader.close();
dfs.close();
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
} else {
LOG.info("Starting thread to redirect stdout of xlearning process");
Thread stdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getInputStream()));
String xlearningStdoutLog;
while ((xlearningStdoutLog = reader.readLine()) != null) {
LOG.info(xlearningStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread stdoutRedirectThread");
e.printStackTrace();
}
}
});
stdoutRedirectThread.start();
}
LOG.info("Starting thread to redirect stderr of xlearning process");
Thread stderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(xlearningProcess.getErrorStream()));
String xlearningStderrLog;
while ((xlearningStderrLog = reader.readLine()) != null) {
if (xlearningStderrLog.contains("reporter progress")) {
heartbeatThread.setProgressLog(xlearningStderrLog);
} else {
LOG.info(xlearningStderrLog);
}
}
} catch (Exception e) {
LOG.warn("Error in thread stderrRedirectThread");
e.printStackTrace();
}
}
});
stderrRedirectThread.start();
heartbeatThread.setContainerStatus(XLearningContainerStatus.RUNNING);
//Start board process
int boardIndex = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_WORKER_INDEX, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_WORKER_INDEX);
Boolean boardEnable = this.conf.getBoolean(XLearningConfiguration.XLEARNING_TF_BOARD_ENABLE, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_ENABLE);
if (boardEnable && this.role.equals(XLearningConstants.WORKER) && boardIndex == this.index) {
Socket boardReservedSocket = new Socket();
try {
boardReservedSocket.bind(new InetSocketAddress("127.0.0.1", 0));
} catch (IOException e) {
LOG.error("Can not get available port");
reportFailedAndExit();
}
String boardHost = envs.get(ApplicationConstants.Environment.NM_HOST.toString());
String boardLogDir = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_LOG_DIR, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_LOG_DIR);
int boardPort = boardReservedSocket.getLocalPort();
String boardCommand;
if ("TENSORFLOW".equals(xlearningAppType)) {
int boardReloadInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_TF_BOARD_RELOAD_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_RELOAD_INTERVAL);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_TF_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_TF_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --reload_interval=" + boardReloadInterval + " --logdir=" + boardLogDir;
} else {
int boardCacheTimeout = this.conf.getInt(XLearningConfiguration.XLEARNING_BOARD_CACHE_TIMEOUT, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_CACHE_TIMEOUT);
boardCommand = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_PATH, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_PATH) + " --host=" + boardHost + " --port=" + boardPort + " --logdir=" + boardLogDir + " --cache_timeout=" + boardCacheTimeout;
String modelpb = this.conf.get(XLearningConfiguration.XLEARNING_BOARD_MODELPB, XLearningConfiguration.DEFAULT_XLEARNING_BOARD_MODELPB);
if (!(modelpb.equals("") || modelpb == null)) {
boardCommand = boardCommand + " --model_pb=" + modelpb;
}
}
String boardUrl = "http://" + boardHost + ":" + boardPort;
LOG.info("Executing borad command:" + boardCommand);
boardReservedSocket.close();
try {
final Process boardProcess = rt.exec(boardCommand, env);
LOG.info("Starting thread to redirect stdout of board process");
Thread boardStdoutRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getInputStream()));
String boardStdoutLog;
while ((boardStdoutLog = reader.readLine()) != null) {
LOG.debug(boardStdoutLog);
}
} catch (Exception e) {
LOG.warn("Exception in thread boardStdoutRedirectThread");
e.printStackTrace();
}
}
});
boardStdoutRedirectThread.start();
LOG.info("Starting thread to redirect stderr of board process");
Thread boardStderrRedirectThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader reader;
reader = new BufferedReader(new InputStreamReader(boardProcess.getErrorStream()));
String boardStderrLog;
while ((boardStderrLog = reader.readLine()) != null) {
LOG.debug(boardStderrLog);
}
} catch (Exception e) {
LOG.warn("Error in thread boardStderrRedirectThread");
e.printStackTrace();
}
}
});
boardStderrRedirectThread.start();
amClient.reportTensorBoardURL(boardUrl);
LOG.info("Container index is " + index + ", report board url:" + boardUrl);
} catch (Exception e) {
LOG.error("Board Process failed. For more detail: " + e);
}
}
int updateAppStatusInterval = this.conf.getInt(XLearningConfiguration.XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL, XLearningConfiguration.DEFAULT_XLEARNING_CONTAINER_UPDATE_APP_STATUS_INTERVAL);
if (this.role.equals(XLearningConstants.WORKER)) {
this.xlearningCmdProcessId = getPidOfProcess(xlearningProcess);
LOG.info("xlearningCmdProcessId is:" + this.xlearningCmdProcessId);
containerReporter = new ContainerReporter(amClient, conf, containerId, this.xlearningCmdProcessId);
containerReporter.setDaemon(true);
containerReporter.start();
}
int code = -1;
while (code == -1 && !heartbeatThread.isXLearningTrainCompleted()) {
Utilities.sleep(updateAppStatusInterval);
try {
code = xlearningProcess.exitValue();
} catch (IllegalThreadStateException e) {
LOG.debug("XLearning Process is running");
}
}
if (this.role.equals(XLearningConstants.PS)) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
if (this.role.equals("server")) {
if (code == -1) {
xlearningProcess.destroy();
return true;
} else if (code == 0) {
return true;
}
return false;
}
//As role is worker
if (code == 0) {
this.uploadOutputFiles();
} else {
return false;
}
return true;
}
#location 234
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
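Another illustrative aside for this record: the readers wrapped around the child process streams in the redirect threads are a typical source of this leak class; a minimal sketch of the try-with-resources form (names hypothetical, not the project's code) is:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

class ProcessOutputSketch {
  static void drainStdout(Process process) throws IOException {
    // The reader, and with it the process stdout stream, is closed when the block exits,
    // even if readLine() throws.
    try (BufferedReader reader =
             new BufferedReader(new InputStreamReader(process.getInputStream()))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}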
#fixed code
private void writeToFile(String message, String path) throws IOException {
if(StringUtils.isEmpty(message) || StringUtils.isEmpty(path)){
return ;
}
PrintWriter out = null;
try {
out = new PrintWriter(new BufferedWriter(new FileWriter(path, true)));
out.println(message);
out.flush();
} finally {
if( null != out ) {
out.close();
}
}
  }
#vulnerable code
private void writeToFile(String message, String path) throws IOException{
if(StringUtils.isEmpty(message) || StringUtils.isEmpty(path)){
return ;
}
RandomAccessFile rf = new RandomAccessFile(path, "rw");
rf.seek(rf.length());
rf.write(message.getBytes());
rf.close();
}
#location 4
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
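For this pair, the fixed version closes the PrintWriter in a finally block; on Java 7+ the same append-to-file logic can also be written with try-with-resources, as in this hypothetical helper:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

class AppendSketch {
  static void append(String path, String message) throws IOException {
    // The writer is flushed and closed automatically, so the file handle cannot leak.
    try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(path, true)))) {
      out.println(message);
    }
  }
}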
#fixed code
private static synchronized void main(final String args, final Instrumentation inst) {
try {
      // The args string has two parts: the agent JAR path and the agentArgs,
      // i.e. the path of the Agent JAR and the arguments to be passed on to the server side
final int index = args.indexOf(";");
final String agentJar = args.substring(0, index);
final String agentArgs = args.substring(index, args.length());
      // Build a custom class loader so that Greys intrudes on the host application as little as possible
final ClassLoader agentLoader = loadOrDefineClassLoader(agentJar);
      // Configure class definition
final Class<?> classOfConfigure = agentLoader.loadClass("com.github.ompc.greys.Configure");
      // GaServer class definition
final Class<?> classOfGaServer = agentLoader.loadClass("com.github.ompc.greys.server.GaServer");
      // Deserialize the agent args into a Configure instance
final Object objectOfConfigure = classOfConfigure.getMethod("toConfigure", String.class)
.invoke(null, agentArgs);
// JavaPid
final int javaPid = (Integer) classOfConfigure.getMethod("getJavaPid").invoke(objectOfConfigure);
      // Obtain the GaServer singleton
final Object objectOfGaServer = classOfGaServer
.getMethod("getInstance", int.class, Instrumentation.class)
.invoke(null, javaPid, inst);
// gaServer.isBind()
final boolean isBind = (Boolean) classOfGaServer.getMethod("isBind").invoke(objectOfGaServer);
if (!isBind) {
classOfGaServer.getMethod("bind", classOfConfigure).invoke(objectOfGaServer, objectOfConfigure);
}
} catch (Throwable t) {
t.printStackTrace();
}
  }
#vulnerable code
private static synchronized void main(final String args, final Instrumentation inst) {
try {
      // // The args string has two parts: the agent JAR path and the agentArgs,
      // // i.e. the path of the Agent JAR and the arguments to be passed on to the server side
// final int index = args.indexOf(";");
// final String agentJar = args.substring(0, index);
// final String agentArgs = args.substring(index, args.length());
      // Build a custom class loader so that Greys intrudes on the host application as little as possible
final ClassLoader agentLoader = new URLClassLoader(new URL[]{new URL("file:" + JARFILE)}) {
        // Still give up on breaking the parent-delegation model here, because the resulting programming model would be too complex
// @Override
// protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
// final Class<?> loadedClass = findLoadedClass(name);
// if (loadedClass != null) {
// return loadedClass;
// }
//
// try {
// Class<?> aClass = findClass(name);
// if (resolve) {
// resolveClass(aClass);
// }
// return aClass;
// } catch (Exception e) {
// return super.loadClass(name, resolve);
// }
// }
};
      // Configure class definition
final Class<?> classOfConfigure = agentLoader.loadClass("com.github.ompc.greys.Configure");
      // GaServer class definition
final Class<?> classOfGaServer = agentLoader.loadClass("com.github.ompc.greys.server.GaServer");
      // Deserialize the args into a Configure instance
final Object objectOfConfigure = classOfConfigure.getMethod("toConfigure", String.class)
.invoke(null, args);
// JavaPid
final int javaPid = (Integer) classOfConfigure.getMethod("getJavaPid").invoke(objectOfConfigure);
      // Obtain the GaServer singleton
final Object objectOfGaServer = classOfGaServer
.getMethod("getInstance", int.class, Instrumentation.class)
.invoke(null, javaPid, inst);
// gaServer.isBind()
final boolean isBind = (Boolean) classOfGaServer.getMethod("isBind").invoke(objectOfGaServer);
if (!isBind) {
classOfGaServer.getMethod("bind", classOfConfigure).invoke(objectOfGaServer, objectOfConfigure);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
#location 39
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void write(long gaSessionId, String jobId, boolean isF, String message) {
if(isF){
message += endMark;
}
if(StringUtils.isEmpty(message)){
return;
}
RandomAccessFile rf = null;
try {
new File(executeResultDir).mkdir();
rf = new RandomAccessFile(getExecuteFilePath(jobId), "rw");
rf.seek(rf.length());
rf.write(message.getBytes());
rf.close();
} catch (IOException e) {
logger.warn("jobFile write error!",e);
return ;
} finally {
if( null != rf ) {
try {
rf.close();
}catch(Exception e) {
//
}
}
}
} | #vulnerable code
private void write(long gaSessionId, String jobId, boolean isF, String message) {
if(isF){
message += endMark;
}
if(StringUtils.isEmpty(message)){
return;
}
RandomAccessFile rf;
try {
new File(executeResultDir).mkdir();
rf = new RandomAccessFile(getExecuteFilePath(jobId), "rw");
rf.seek(rf.length());
rf.write(message.getBytes());
rf.close();
} catch (IOException e) {
logger.warn("jobFile write error!",e);
return ;
}
}
#location 18
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public static Configure toConfigure(String toString) {
final Configure configure = new Configure();
final String[] pvs = split(toString, ";");
for (String pv : pvs) {
try {
final String[] stringSplitArray = split(pv, "=");
final String p = stringSplitArray[0];
final String v = decode(stringSplitArray[1]);
final Field field = getField(Configure.class, p);
if( null != field ) {
set(field, valueOf(field.getType(), v), configure);
}
} catch (Throwable t) {
//
}
}
return configure;
} | #vulnerable code
public static Configure toConfigure(String toString) {
final Configure configure = new Configure();
final String[] pvs = split(toString, ";");
for (String pv : pvs) {
try {
final String[] stringSplitArray = split(pv, "=");
final String p = stringSplitArray[0];
final String v = decode(stringSplitArray[1]);
final Field field = getField(Configure.class, p);
set(field, valueOf(field.getType(), v), configure);
} catch (Throwable t) {
//
}
}
return configure;
}
#location 10
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, /*COMPUTE_FRAMES |*/ COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), SKIP_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// // dump
// final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
// os.write(enhanceClassByteArray);
// os.flush();
// os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
} | #vulnerable code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), SKIP_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// dump
final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
os.write(enhanceClassByteArray);
os.flush();
os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
}
#location 52
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public String draw() {
return filterEmptyLine(tableView.draw());
} | #vulnerable code
@Override
public String draw() {
String content = tableView.draw();
StringBuilder sb = new StringBuilder();
// 清理多余的空格
Scanner scanner = new Scanner(content);
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
if (line != null) {
//清理一行后面多余的空格
line = StringUtils.stripEnd(line, " ");
if(line.isEmpty()){
line = " ";
}
}
sb.append(line).append('\n');
}
scanner.close();
return sb.toString();
}
#location 18
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// // dump
// final java.io.OutputStream os = new FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
// os.write(enhanceClassByteArray);
// os.flush();
// os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
} | #vulnerable code
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS);
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// dump
final java.io.OutputStream os = new FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
os.write(enhanceClassByteArray);
os.flush();;
os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
if (logger.isLoggable(WARNING)) {
logger.log(WARNING, format("transform class[%s] failed. ClassLoader=%s;", className, loader), t);
}
}
return null;
}
#location 52
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public byte[] transform(
final ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS) {
/*
* 注意,为了自动计算帧的大小,有时必须计算两个类共同的父类。
* 缺省情况下,ClassWriter将会在getCommonSuperClass方法中计算这些,通过在加载这两个类进入虚拟机时,使用反射API来计算。
* 但是,如果你将要生成的几个类相互之间引用,这将会带来问题,因为引用的类可能还不存在。
* 在这种情况下,你可以重写getCommonSuperClass方法来解决这个问题。
*
* 通过重写 getCommonSuperClass() 方法,更正获取ClassLoader的方式,改成使用指定ClassLoader的方式进行。
* 规避了原有代码采用Object.class.getClassLoader()的方式
*/
@Override
protected String getCommonSuperClass(String type1, String type2) {
Class<?> c, d;
final ClassLoader classLoader = loader;
try {
c = Class.forName(type1.replace('/', '.'), false, classLoader);
d = Class.forName(type2.replace('/', '.'), false, classLoader);
} catch (Exception e) {
throw new RuntimeException(e.toString());
}
if (c.isAssignableFrom(d)) {
return type1;
}
if (d.isAssignableFrom(c)) {
return type2;
}
if (c.isInterface() || d.isInterface()) {
return "java/lang/Object";
} else {
do {
c = c.getSuperclass();
} while (!c.isAssignableFrom(d));
return c.getName().replace('.', '/');
}
}
};
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// // dump
// final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
// os.write(enhanceClassByteArray);
// os.flush();
// os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
logger.warn("transform loader[{}]:class[{}] failed.", loader, className, t);
}
return null;
} | #vulnerable code
public byte[] transform(
final ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws IllegalClassFormatException {
// 这里要再次过滤一次,为啥?因为在transform的过程中,有可能还会再诞生新的类
// 所以需要将之前需要转换的类集合传递下来,再次进行判断
if (!matchingClasses.contains(classBeingRedefined)) {
return null;
}
final ClassReader cr;
// 首先先检查是否在缓存中存在Class字节码
// 因为要支持多人协作,存在多人同时增强的情况
final byte[] byteOfClassInCache = classBytesCache.get(classBeingRedefined);
if (null != byteOfClassInCache) {
cr = new ClassReader(byteOfClassInCache);
}
// 如果没有命中缓存,则从原始字节码开始增强
else {
cr = new ClassReader(classfileBuffer);
}
// 字节码增强
final ClassWriter cw = new ClassWriter(cr, COMPUTE_FRAMES | COMPUTE_MAXS) {
/*
* 注意,为了自动计算帧的大小,有时必须计算两个类共同的父类。
* 缺省情况下,ClassWriter将会在getCommonSuperClass方法中计算这些,通过在加载这两个类进入虚拟机时,使用反射API来计算。
* 但是,如果你将要生成的几个类相互之间引用,这将会带来问题,因为引用的类可能还不存在。
* 在这种情况下,你可以重写getCommonSuperClass方法来解决这个问题。
*
* 通过重写 getCommonSuperClass() 方法,更正获取ClassLoader的方式,改成使用指定ClassLoader的方式进行。
* 规避了原有代码采用Object.class.getClassLoader()的方式
*/
@Override
protected String getCommonSuperClass(String type1, String type2) {
Class<?> c, d;
final ClassLoader classLoader = loader;
try {
c = Class.forName(type1.replace('/', '.'), false, classLoader);
d = Class.forName(type2.replace('/', '.'), false, classLoader);
} catch (Exception e) {
throw new RuntimeException(e.toString());
}
if (c.isAssignableFrom(d)) {
return type1;
}
if (d.isAssignableFrom(c)) {
return type2;
}
if (c.isInterface() || d.isInterface()) {
return "java/lang/Object";
} else {
do {
c = c.getSuperclass();
} while (!c.isAssignableFrom(d));
return c.getName().replace('.', '/');
}
}
};
try {
// 生成增强字节码
cr.accept(new AdviceWeaver(adviceId, isTracing, cr.getClassName(), methodNameMatcher, affect, cw), EXPAND_FRAMES);
final byte[] enhanceClassByteArray = cw.toByteArray();
// 生成成功,推入缓存
classBytesCache.put(classBeingRedefined, enhanceClassByteArray);
// 成功计数
affect.cCnt(1);
// dump
final java.io.OutputStream os = new java.io.FileOutputStream(new java.io.File("/tmp/AgentTest.class"));
os.write(enhanceClassByteArray);
os.flush();
os.close();
return enhanceClassByteArray;
} catch (Throwable t) {
logger.warn("transform loader[{}]:class[{}] failed.", loader, className, t);
}
return null;
}
#location 90
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void read(String jobId, int pos, RespResult respResult) {
int newPos = pos;
final StringBuilder sb = new StringBuilder();
RandomAccessFile rf = null;
try {
rf = new RandomAccessFile(getExecuteFilePath(jobId), "r");
rf.seek(pos);
byte[] buffer = new byte[10000];
int len=0;
while ((len=rf.read(buffer))!=-1) {
newPos += len;
sb.append(new String(buffer,0,len));
}
respResult.setPos(newPos);
respResult.setMessage(sb.toString());
} catch (IOException e) {
logger.warn("jobFile read error!");
return ;
} finally {
if( null != rf ) {
try {
rf.close();
}catch(Exception e) {
//
}
}
}
} | #vulnerable code
private void read(String jobId, int pos, RespResult respResult) {
RandomAccessFile rf;
StringBuilder sb = new StringBuilder();
int newPos = pos;
try {
rf = new RandomAccessFile(getExecuteFilePath(jobId), "r");
rf.seek(pos);
byte[] buffer = new byte[10000];
int len=0;
while ((len=rf.read(buffer))!=-1) {
newPos += len;
sb.append(new String(buffer,0,len));
}
rf.close();
} catch (IOException e) {
logger.warn("jobFile read error!");
return ;
}
respResult.setPos(newPos);
respResult.setMessage(sb.toString());
}
#location 15
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
generate(manifestXml, moduleName, baseDir);
} | #vulnerable code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
ModuleGenerator moduleGenerator = new ModuleGenerator();
InputSource in = new InputSource(new FileInputStream(manifestXml));
Document document = moduleGenerator.manifestToDocument(in);
File file = moduleGenerator.path(document, moduleName, baseDir);
file.getParentFile().mkdirs();
JavaWriter out = new JavaWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
moduleGenerator.generate(document, moduleName, out);
out.close();
}
#location 5
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void validate() {
Map<String, Binding<?>> allBindings = linkEverything();
new ProblemDetector().detectProblems(allBindings.values());
} | #vulnerable code
public void validate() {
Map<String, Binding<?>> allBindings;
synchronized (linker) {
linkStaticInjections();
linkEntryPoints();
allBindings = linker.linkAll();
}
new ProblemDetector().detectProblems(allBindings.values());
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public static String get(Type type, Annotation[] annotations, Object subject) {
return get(type, extractQualifier(annotations, subject));
} | #vulnerable code
public static String get(Type type, Annotation[] annotations, Object subject) {
Annotation qualifier = null;
for (Annotation a : annotations) {
if (!IS_QUALIFIER_ANNOTATION.get(a.annotationType())) {
continue;
}
if (qualifier != null) {
throw new IllegalArgumentException("Too many qualifier annotations on " + subject);
}
qualifier = a;
}
return get(type, qualifier);
}
#location 4
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public ObjectGraph plus(Object... modules) {
linkEverything();
return makeGraph(this, plugin, modules);
} | #vulnerable code
public ObjectGraph plus(Object... modules) {
linker.linkAll();
return makeGraph(this, plugin, modules);
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
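The two THREAD_SAFETY_VIOLATION rows above (validate() and plus()) appear to be resolved the same way: instead of each caller taking the lock and touching the linker directly, the patched code routes the work through a single helper (linkEverything()) that owns the synchronization. As a hedged sketch (not a dataset row), the class below shows that shape in isolation; the class, field, and method names are invented for the example.

// Illustrative only -- not part of the dataset.
import java.util.HashMap;
import java.util.Map;

final class LinkCache {
    private final Object lock = new Object();
    private final Map<String, Object> bindings = new HashMap<>();

    // Every mutation of the shared map goes through this method and this lock.
    void put(String key, Object binding) {
        synchronized (lock) {
            bindings.put(key, binding);
        }
    }

    // Readers take the same lock and receive a defensive copy, never the live map.
    Map<String, Object> snapshot() {
        synchronized (lock) {
            return new HashMap<>(bindings);
        }
    }
}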
#fixed code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
generate(manifestXml, moduleName, baseDir);
} | #vulnerable code
public static void main(String[] args) throws Exception {
if (args.length != 3) {
printUsage();
return;
}
File manifestXml = new File(args[0]);
String moduleName = args[1];
File baseDir = new File(args[2]);
if (!manifestXml.exists()) {
System.out.println("No such file: " + manifestXml);
printUsage();
return;
}
if (!baseDir.isDirectory()) {
System.out.println("No such directory: " + baseDir);
printUsage();
return;
}
ModuleGenerator moduleGenerator = new ModuleGenerator();
InputSource in = new InputSource(new FileInputStream(manifestXml));
Document document = moduleGenerator.manifestToDocument(in);
File file = moduleGenerator.path(document, moduleName, baseDir);
file.getParentFile().mkdirs();
JavaWriter out = new JavaWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
moduleGenerator.generate(document, moduleName, out);
out.close();
}
#location 28
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public boolean lock(long seckillId) {
try {
if (threadLock.get() == null) {
Map<Long, InterProcessMutex> map = new HashMap();
map.put(seckillId,new InterProcessMutex(client,ROOT_LOCK_PATH+"/"+String.valueOf(seckillId)));
threadLock.set(map);
}else{
threadLock.get().get(seckillId).acquire(2L, TimeUnit.SECONDS);
}
return true;
} catch (Exception e) {
log.error(e.getMessage(), e);
return false;
}
} | #vulnerable code
public boolean lock(long seckillId) {
try {
if (lockMap.get(seckillId) == null) {
lockMap.put(seckillId, new InterProcessMutex(client, ROOT_LOCK_PATH+"/"+String.valueOf(seckillId)));
}
lockMap.get(seckillId).acquire(2L, TimeUnit.SECONDS);
return true;
} catch (Exception e) {
log.error(e.getMessage(), e);
return false;
}
}
#location 6
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public boolean lock(long seckillId) {
try {
Map<Long, InterProcessMutex> map;
String rootLockPath = "/goodskill";
Map<Long, InterProcessMutex> processMutexMap = threadLock.get();
if (processMutexMap.get(seckillId) == null) {
processMutexMap.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
}
boolean acquire = processMutexMap.get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) {
log.debug("成功获取到zk锁,秒杀id{}", seckillId);
}
return acquire;
} catch (Exception e) {
log.warn("获取zk锁异常:{}", e.getMessage());
return false;
}
} | #vulnerable code
public boolean lock(long seckillId) {
try {
Map<Long, InterProcessMutex> map;
String rootLockPath = "/goodskill";
if (threadLock.get() == null) {
map = new ConcurrentHashMap();
map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
threadLock.set(map);
} else {
if (threadLock.get().get(seckillId) == null) {
map = threadLock.get();
map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
}
}
boolean acquire = threadLock.get().get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) {
log.debug("成功获取到zk锁,秒杀id{}", seckillId);
}
return acquire;
} catch (Exception e) {
log.warn("获取zk锁异常:{}", e.getMessage());
return false;
}
}
#location 15
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
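Several of the NULL_DEREFERENCE rows, including the ZooKeeper lock examples above, come down to dereferencing a map or ThreadLocal entry that may not have been created yet; the patches add the missing initialization or null check before use. Purely as an illustration (not part of the dataset), the sketch below expresses the same idea with Map.computeIfAbsent; the LockRegistry name and the ReentrantLock value type are placeholders, not taken from the rows.

// Illustrative only -- not part of the dataset.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

final class LockRegistry {
    private final Map<Long, ReentrantLock> locks = new ConcurrentHashMap<>();

    ReentrantLock lockFor(long id) {
        // Never returns null: the entry is created on first use.
        return locks.computeIfAbsent(id, k -> new ReentrantLock());
    }
}

computeIfAbsent on a ConcurrentHashMap is atomic per key, so two threads asking for the same id still end up sharing a single lock object rather than racing to create two.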
#fixed code
private String uploadGoodsPhoto(CommonsMultipartFile file) throws IOException {
final String s = "/Users/heng/java学习/";
String path = s + file.getOriginalFilename();
FileOutputStream fos = null;
InputStream is = null;
try {
String filePath = s;
File file_tmp = new File(filePath);
if (!file_tmp.exists() && !file_tmp.mkdirs()) {
throw new HengException("dir create error!");
}
fos = new FileOutputStream(path);
is = file.getInputStream();
int b;
while ((b = is.read()) != -1) {
fos.write(b);
}
fos.flush();
} catch (IOException e) {
logger.error("error message is:", e);
throw new HengException("上传文件异常");
} finally {
if (fos!=null){
fos.close();
}
if (is!=null){
is.close();
}
}
return path;
} | #vulnerable code
private String uploadGoodsPhoto(CommonsMultipartFile file) {
final String s = "/Users/heng/java学习/";
String path = s + file.getOriginalFilename();
try {
String filePath = s;
File file_tmp = new File(filePath);
if (!file_tmp.exists() && !file_tmp.mkdirs()) {
throw new HengException("dir create error!");
}
FileOutputStream fos = new FileOutputStream(path);
InputStream is = file.getInputStream();
int b = 0;
while ((b = is.read()) != -1) {
fos.write(b);
}
fos.flush();
fos.close();
is.close();
} catch (IOException e) {
throw new HengException("上传文件异常");
}
return path;
}
#location 19
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public static void main(String[] args) {
SpringApplication.run(GoodsKillRpcServiceApplication.class);
} | #vulnerable code
public static void main(String[] args) {
log.info(">>>>> goodsKill-rpc-service 正在启动 <<<<<");
AbstractApplicationContext context= new ClassPathXmlApplicationContext(
"classpath*:META-INF/spring/spring-*.xml");
// 程序退出前优雅关闭JVM
context.registerShutdownHook();
context.start();
log.info(">>>>> goodsKill-rpc-service 启动完成 <<<<<");
}
#location 7
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public boolean lock(long seckillId) {
try {
Map<Long, InterProcessMutex> map;
String rootLockPath = "/goodskill";
Map<Long, InterProcessMutex> processMutexMap = threadLock.get();
if (processMutexMap.get(seckillId) == null) {
processMutexMap.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
}
boolean acquire = processMutexMap.get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) {
log.debug("成功获取到zk锁,秒杀id{}", seckillId);
}
return acquire;
} catch (Exception e) {
log.warn("获取zk锁异常:{}", e.getMessage());
return false;
}
} | #vulnerable code
public boolean lock(long seckillId) {
try {
Map<Long, InterProcessMutex> map;
String rootLockPath = "/goodskill";
if (threadLock.get() == null) {
map = new ConcurrentHashMap();
map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
threadLock.set(map);
} else {
if (threadLock.get().get(seckillId) == null) {
map = threadLock.get();
map.put(seckillId, new InterProcessMutex(client, rootLockPath + "/" + seckillId));
}
}
boolean acquire = threadLock.get().get(seckillId).acquire(5000L, TimeUnit.MILLISECONDS);
if (log.isDebugEnabled()) {
log.debug("成功获取到zk锁,秒杀id{}", seckillId);
}
return acquire;
} catch (Exception e) {
log.warn("获取zk锁异常:{}", e.getMessage());
return false;
}
}
#location 15
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test(expected = NullPointerException.class)
public void testCreateNull() {
new TemplateList(null,(String[]) null);
} | #vulnerable code
@Test(expected = NullPointerException.class)
public void testCreateNull() {
new PatternList((String[]) null);
}
#location 3
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
oprand.accept(this);
if (!isConstant(expr, token, oprand))
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
} | #vulnerable code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
expr.oprand1().accept(this);
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
#location 24
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public InputStream openClassfile(String classname) {
try {
URLConnection con = openClassfile0(classname);
if (con != null)
return con.getInputStream();
}
catch (IOException e) {}
return null; // not found
} | #vulnerable code
public InputStream openClassfile(String classname) {
try {
if (packageName == null || classname.startsWith(packageName)) {
String jarname
= directory + classname.replace('.', '/') + ".class";
URLConnection con = fetchClass0(hostname, port, jarname);
return con.getInputStream();
}
}
catch (IOException e) {}
return null; // not found
}
#location 7
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void setSuperclass(String superclass)
throws CannotCompileException
{
if (superclass == null)
superclass = "java.lang.Object";
try {
superClass = constPool.addClassInfo(superclass);
LinkedList list = methods;
int n = list.size();
for (int i = 0; i < n; ++i) {
MethodInfo minfo = (MethodInfo)list.get(i);
minfo.setSuperclass(superclass);
}
}
catch (BadBytecode e) {
throw new CannotCompileException(e);
}
} | #vulnerable code
public void setSuperclass(String superclass)
throws CannotCompileException
{
if (constPool.getClassInfo(superClass).equals("java.lang.Object")) {
if (superclass != null)
try {
superClass = constPool.addClassInfo(superclass);
setSuperclass2(superclass);
}
catch (BadBytecode e) {
throw new CannotCompileException(e);
}
}
else {
if (superclass == null)
superclass = "java.lang.Object";
renameClass(constPool.getClassInfo(superClass), superclass);
}
}
#location 4
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
oprand.accept(this);
if (!isConstant(expr, token, oprand))
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
} | #vulnerable code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
expr.oprand1().accept(this);
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
#location 8
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public CtField lookupField(ASTList className, Symbol fieldName)
throws CompileError
{
return lookupJavaField(Declarator.astToClassName(className, '.'),
fieldName);
} | #vulnerable code
public CtField lookupField(ASTList className, Symbol fieldName)
throws CompileError
{
return lookupField2(Declarator.astToClassName(className, '.'),
fieldName);
}
#location 4
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
oprand.accept(this);
if (!isConstant(expr, token, oprand))
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
} | #vulnerable code
public void atExpr(Expr expr) throws CompileError {
// array access, member access,
// (unary) +, (unary) -, ++, --, !, ~
int token = expr.getOperator();
ASTree oprand = expr.oprand1();
if (token == '.') {
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("length"))
atArrayLength(expr);
else if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == MEMBER) { // field read
String member = ((Symbol)expr.oprand2()).get();
if (member.equals("class"))
atClassObject(expr); // .class
else
atFieldRead(expr);
}
else if (token == ARRAY)
atArrayRead(oprand, expr.oprand2());
else if (token == PLUSPLUS || token == MINUSMINUS)
atPlusPlus(token, oprand, expr);
else if (token == '!')
booleanExpr(expr);
else if (token == CALL) // method call
fatal();
else {
expr.oprand1().accept(this);
if (token == '-' || token == '~')
if (CodeGen.isP_INT(exprType))
exprType = INT; // type may be BYTE, ...
}
}
#location 8
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
protected CtField fieldAccess(ASTree expr) throws CompileError {
if (expr instanceof Member) {
Member mem = (Member)expr;
String name = mem.get();
try {
CtField f = thisClass.getField(name);
if (Modifier.isStatic(f.getModifiers()))
mem.setField(f);
return f;
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER) {
Member mem = (Member)e.oprand2();
CtField f
= resolver.lookupField(((Symbol)e.oprand1()).get(), mem);
mem.setField(f);
return f;
}
else if (op == '.')
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
return resolver.lookupFieldByJvmName(className,
(Symbol)e.oprand2());
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
/* EXPR should be a static field.
* If EXPR might be part of a qualified class name,
* lookupFieldByJvmName2() throws NoFieldException.
*/
Member fname = (Member)e.oprand2();
String jvmClassName = nfe.getField();
CtField f = resolver.lookupFieldByJvmName2(jvmClassName,
fname, expr);
e.setOperator(MEMBER);
e.setOprand1(new Symbol(MemberResolver.jvmToJavaName(
jvmClassName)));
fname.setField(f);
return f;
}
}
throw new CompileError("bad filed access");
} | #vulnerable code
protected CtField fieldAccess(ASTree expr) throws CompileError {
if (expr instanceof Member) {
String name = ((Member)expr).get();
try {
return thisClass.getField(name);
}
catch (NotFoundException e) {
// EXPR might be part of a static member access?
throw new NoFieldException(name, expr);
}
}
else if (expr instanceof Expr) {
Expr e = (Expr)expr;
int op = e.getOperator();
if (op == MEMBER)
return resolver.lookupField(((Symbol)e.oprand1()).get(),
(Symbol)e.oprand2());
else if (op == '.')
try {
e.oprand1().accept(this);
if (exprType == CLASS && arrayDim == 0)
return resolver.lookupFieldByJvmName(className,
(Symbol)e.oprand2());
}
catch (NoFieldException nfe) {
if (nfe.getExpr() != e.oprand1())
throw nfe;
/* EXPR should be a static field.
* If EXPR might be part of a qualified class name,
* lookupFieldByJvmName2() throws NoFieldException.
*/
Symbol fname = (Symbol)e.oprand2();
return resolver.lookupFieldByJvmName2(nfe.getField(),
fname, expr);
}
}
throw new CompileError("bad filed access");
}
#location 16
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public ClassFile getClassFile2() {
ClassFile cfile = classfile;
if (cfile != null)
return cfile;
if (readCounter++ > READ_THRESHOLD) {
releaseClassFiles();
readCounter = 0;
}
if (rawClassfile != null) {
try {
classfile = new ClassFile(new DataInputStream(
new ByteArrayInputStream(rawClassfile)));
rawClassfile = null;
getCounter = GET_THRESHOLD;
return classfile;
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
}
InputStream fin = null;
try {
fin = classPool.openClassfile(getName());
if (fin == null)
throw new NotFoundException(getName());
fin = new BufferedInputStream(fin);
ClassFile cf = new ClassFile(new DataInputStream(fin));
if (!cf.getName().equals(qualifiedName))
throw new RuntimeException("cannot find " + qualifiedName + ": "
+ cf.getName() + " found in "
+ qualifiedName.replace('.', '/') + ".class");
classfile = cf;
return cf;
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString(), e);
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
finally {
if (fin != null)
try {
fin.close();
}
catch (IOException e) {}
}
} | #vulnerable code
public ClassFile getClassFile2() {
ClassFile cfile = classfile;
if (cfile != null)
return cfile;
if (readCounter++ > READ_THRESHOLD) {
getCounter += 2;
releaseClassFiles();
readCounter = 0;
}
if (rawClassfile != null) {
try {
classfile = new ClassFile(new DataInputStream(
new ByteArrayInputStream(rawClassfile)));
rawClassfile = null;
getCounter = GET_THRESHOLD;
return classfile;
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
}
InputStream fin = null;
try {
fin = classPool.openClassfile(getName());
if (fin == null)
throw new NotFoundException(getName());
fin = new BufferedInputStream(fin);
ClassFile cf = new ClassFile(new DataInputStream(fin));
if (!cf.getName().equals(qualifiedName))
throw new RuntimeException("cannot find " + qualifiedName + ": "
+ cf.getName() + " found in "
+ qualifiedName.replace('.', '/') + ".class");
classfile = cf;
return cf;
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString(), e);
}
catch (IOException e) {
throw new RuntimeException(e.toString(), e);
}
finally {
if (fin != null)
try {
fin.close();
}
catch (IOException e) {}
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public ClassFile getClassFile2() {
if (classfile != null)
return classfile;
InputStream fin = null;
try {
fin = classPool.openClassfile(getName());
if (fin == null)
throw new NotFoundException(getName());
classfile = new ClassFile(new DataInputStream(fin));
return classfile;
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString());
}
catch (IOException e) {
throw new RuntimeException(e.toString());
}
finally {
if (fin != null)
try {
fin.close();
}
catch (IOException e) {}
}
} | #vulnerable code
public ClassFile getClassFile2() {
if (classfile != null)
return classfile;
try {
byte[] b = classPool.readSource(getName());
DataInputStream dis
= new DataInputStream(new ByteArrayInputStream(b));
return (classfile = new ClassFile(dis));
}
catch (NotFoundException e) {
throw new RuntimeException(e.toString());
}
catch (IOException e) {
throw new RuntimeException(e.toString());
}
catch (CannotCompileException e) {
throw new RuntimeException(e.toString());
}
}
#location 9
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void renameClass(String oldName, String newName) {
LongVector v = items;
int size = numOfItems;
classes = new HashMap(classes.size() * 2);
for (int i = 1; i < size; ++i) {
ConstInfo ci = (ConstInfo)v.elementAt(i);
ci.renameClass(this, oldName, newName);
ci.makeHashtable(this);
}
} | #vulnerable code
public void renameClass(String oldName, String newName) {
LongVector v = items;
int size = numOfItems;
for (int i = 1; i < size; ++i)
((ConstInfo)v.elementAt(i)).renameClass(this, oldName, newName);
}
#location 5
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void atBinExpr(BinExpr expr) throws CompileError {
int token = expr.getOperator();
int k = CodeGen.lookupBinOp(token);
if (k >= 0) {
/* arithmetic operators: +, -, *, /, %, |, ^, &, <<, >>, >>>
*/
if (token == '+') {
Expr e = atPlusExpr(expr);
if (e != null) {
/* String concatenation has been translated into
* an expression using StringBuffer.
*/
e = CallExpr.makeCall(Expr.make('.', e,
new Member("toString")), null);
expr.setOprand1(e);
expr.setOprand2(null); // <---- look at this!
className = jvmJavaLangString;
}
}
else {
ASTree left = expr.oprand1();
ASTree right = expr.oprand2();
left.accept(this);
int type1 = exprType;
right.accept(this);
if (!isConstant(expr, token, left, right))
computeBinExprType(expr, token, type1);
}
}
else {
/* equation: &&, ||, ==, !=, <=, >=, <, >
*/
booleanExpr(expr);
}
} | #vulnerable code
public void atBinExpr(BinExpr expr) throws CompileError {
int token = expr.getOperator();
int k = CodeGen.lookupBinOp(token);
if (k >= 0) {
/* arithmetic operators: +, -, *, /, %, |, ^, &, <<, >>, >>>
*/
if (token == '+') {
Expr e = atPlusExpr(expr);
if (e != null) {
/* String concatenation has been translated into
* an expression using StringBuffer.
*/
e = CallExpr.makeCall(Expr.make('.', e,
new Member("toString")), null);
expr.setLeft(e);
expr.setOprand2(null); // <---- look at this!
className = jvmJavaLangString;
}
}
else {
expr.oprand1().accept(this);
int type1 = exprType;
expr.oprand2().accept(this);
computeBinExprType(expr, token, type1);
}
}
else {
/* equation: &&, ||, ==, !=, <=, >=, <, >
*/
booleanExpr(expr);
}
}
#location 23
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void atPlusPlus(int token, ASTree oprand, Expr expr)
throws CompileError
{
boolean isPost = oprand == null; // ++i or i++?
if (isPost)
oprand = expr.oprand2();
if (oprand instanceof Variable) {
Declarator d = ((Variable)oprand).getDeclarator();
exprType = d.getType();
arrayDim = d.getArrayDim();
}
else {
if (oprand instanceof Expr) {
Expr e = (Expr)oprand;
if (e.getOperator() == ARRAY) {
atArrayRead(e.oprand1(), e.oprand2());
// arrayDim should be 0.
int t = exprType;
if (t == INT || t == BYTE || t == CHAR || t == SHORT)
exprType = INT;
return;
}
}
atFieldPlusPlus(oprand);
}
} | #vulnerable code
private void atPlusPlus(int token, ASTree oprand, Expr expr)
throws CompileError
{
boolean isPost = oprand == null; // ++i or i++?
if (isPost)
oprand = expr.oprand2();
if (oprand instanceof Variable) {
Declarator d = ((Variable)oprand).getDeclarator();
exprType = d.getType();
arrayDim = d.getArrayDim();
}
else {
if (oprand instanceof Expr) {
Expr e = (Expr)oprand;
if (e.getOperator() == ARRAY) {
atArrayRead(expr.oprand1(), expr.oprand2());
// arrayDim should be 0.
int t = exprType;
if (t == INT || t == BYTE || t == CHAR || t == SHORT)
exprType = INT;
return;
}
}
atFieldPlusPlus(oprand);
}
}
#location 17
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void renameClass(Map classnames) {
LongVector v = items;
int size = numOfItems;
classes = new HashMap(classes.size() * 2);
for (int i = 1; i < size; ++i) {
ConstInfo ci = (ConstInfo)v.elementAt(i);
ci.renameClass(this, classnames);
ci.makeHashtable(this);
}
} | #vulnerable code
public void renameClass(Map classnames) {
LongVector v = items;
int size = numOfItems;
for (int i = 1; i < size; ++i)
((ConstInfo)v.elementAt(i)).renameClass(this, classnames);
}
#location 5
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |