MAP_NONE, Allocation.USAGE_SCRIPT);
Script.LaunchOptions lo = new Script.LaunchOptions();
lo.setX(0, width);
lo.setY(0, height);
mYuv420.forEach_doConvert(outAlloc, lo);
outAlloc.copyTo(outBitmap);
return outBitmap;
}
private float[] getFrame(ImageProxy imageProxy) {
@SuppressLint("UnsafeOptInUsageError")
Image image = imageProxy.getImage();
if (image == null) {
return null;
}
Bitmap imageBitmap = YUV_420_888_toRGB(image, image.getWidth(), image.getHeight());
imageProxy.close();
Bitmap cropImageBitmap = Bitmap.createBitmap(MODEL_INPUT_SIZE, MODEL_INPUT_SIZE, Bitmap.Config.ARGB_8888);
Matrix frameToCropTransform = getTransformationMatrix(imageBitmap.getWidth(), imageBitmap.getHeight(),
MODEL_INPUT_SIZE, MODEL_INPUT_SIZE, 0, true);
Canvas canvas = new Canvas(cropImageBitmap);
canvas.drawBitmap(imageBitmap, frameToCropTransform, null);
int[] pixelValues = new int[MODEL_INPUT_SIZE * MODEL_INPUT_SIZE];
cropImageBitmap.getPixels(pixelValues, 0, MODEL_INPUT_SIZE, 0, 0, MODEL_INPUT_SIZE, MODEL_INPUT_SIZE);
for (int j = 0; j < pixelValues.length; ++j) {
mCHW2[j * 3 + 0] = ((pixelValues[j] >> 16) & 0xFF) / 255.0f;
mCHW2[j * 3 + 1] = ((pixelValues[j] >> 8) & 0xFF) / 255.0f;
mCHW2[j * 3 + 2] = (pixelValues[j] & 0xFF) / 255.0f;
}
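        // Transpose the interleaved HWC pixel buffer (mCHW2) into planar CHW order (mCHW),
        // i.e. the NCHW layout the TVM model input expects.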
for (int k = 0; k < IMG_CHANNEL; ++k) {
for (int l = 0; l < MODEL_INPUT_SIZE; ++l) {
for (int m = 0; m < MODEL_INPUT_SIZE; ++m) {
int dst_index = m + MODEL_INPUT_SIZE * l + MODEL_INPUT_SIZE * MODEL_INPUT_SIZE * k;
int src_index = k + IMG_CHANNEL * m + IMG_CHANNEL * MODEL_INPUT_SIZE * l;
mCHW[dst_index] = mCHW2[src_index];
}
}
}
return mCHW;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
cameraProviderFuture = ProcessCameraProvider.getInstance(getActivity());
}
@SuppressLint({"RestrictedApi", "UnsafeExperimentalUsageError"})
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.fragment_camera2_basic, container, false);
previewView = v.findViewById(R.id.textureView);
rs = RenderScript.create(getActivity());
mYuv420 = new ScriptC_yuv420888(rs);
cameraProviderFuture.addListener(() -> {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
bindPreview(cameraProvider);
} catch (ExecutionException | InterruptedException e) {
    // The camera provider future does not fail in practice; there is nothing useful to handle here.
}
}, ContextCompat.getMainExecutor(getActivity()));
imageAnalysis = new ImageAnalysis.Builder()
.setTargetResolution(new Size(224, 224))
.setMaxResolution(new Size(300, 300))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
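        // STRATEGY_KEEP_ONLY_LATEST drops stale frames while the analyzer is still busy; combined
        // with the isProcessingDone semaphore below, at most one frame is classified at a time.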
imageAnalysis.setAnalyzer(threadPoolExecutor, image -> {
Log.e(TAG, "w: " + image.getWidth() + " h: " + image.getHeight());
if (mRunClassifier && isProcessingDone.tryAcquire()) {
long t1 = SystemClock.uptimeMillis();
float[] chw = getFrame(image);
if (chw != null) {
long t2 = SystemClock.uptimeMillis();
String[] results = inference(chw);
long t3 = SystemClock.uptimeMillis();
StringBuilder msgBuilder = new StringBuilder();
for (int l = 1; l < 5; l++) {
msgBuilder.append(results[l]).append("\n");
}
String msg = msgBuilder.toString();
msg += "getFrame(): " + (t2 - t1) + "ms" + "\n";
msg += "inference(): " + (t3 - t2) + "ms" + "\n";
String finalMsg = msg;
this.getActivity().runOnUiThread(() -> {
mResultView.setText(String.format("model: %s \n %s", mCurModel, results[0]));
mInfoView.setText(finalMsg);
});
}
isProcessingDone.release();
}
image.close();
});
return v;
}
private void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
@SuppressLint("RestrictedApi") Preview preview = new Preview.Builder()
.setMaxResolution(new Size(800, 800))
.setTargetName("Preview")
.build();
preview.setSurfaceProvider(previewView.getPreviewSurfaceProvider());
CameraSelector cameraSelector =
new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_BACK)
.build();
Camera camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageAnalysis);
}
@Override
public void onDestroyView() {
threadPoolExecutor.shutdownNow();
super.onDestroyView();
}
private void setInputName(String modelName) {
if (modelName.equals("mobilenet_v2")) {
INPUT_NAME = "input_1";
} else if (modelName.equals("resnet18_v1")) {
INPUT_NAME = "data";
} else {
throw new RuntimeException("Model input may not be right. Please set INPUT_NAME here explicitly.");
}
}
/*
Load precompiled model on TVM graph executor and init the system.
*/
private class LoadModelAsyncTask extends AsyncTask<Void, Void, Integer> {
@Override
protected Integer doInBackground(Void... args) {
mRunClassifier = false;
int modelIndex = mModelView.getCheckedItemPosition();
setInputName(models[modelIndex]);
String model = MODELS + "/" + models[modelIndex];
String labelFilename = MODEL_LABEL_FILE;
Log.i(TAG, "Reading labels from: " + model + "/" + labelFilename);
try {
labels = new JSONObject(new String(getBytesFromFile(assetManager, model + "/" + labelFilename)));
} catch (IOException | JSONException e) {
Log.e(TAG, "Problem reading labels name file!", e);
return -1;
}
String modelGraph;
String graphFilename = MODEL_GRAPH_FILE;
Log.i(TAG, "Reading json graph from: " + model + "/" + graphFilename);
try {
modelGraph = new String(getBytesFromFile(assetManager, model + "/" + graphFilename));
} catch (IOException e) {
Log.e(TAG, "Problem reading json graph file!", e);
return -1;
}
String libCacheFilePath;
String libFilename = EXE_GPU ? MODEL_CL_LIB_FILE : MODEL_CPU_LIB_FILE;
Log.i(TAG, "Uploading compiled function to cache folder");
try {
libCacheFilePath = getTempLibFilePath(libFilename);
byte[] modelLibByte = getBytesFromFile(assetManager, model + "/" + libFilename);
FileOutputStream fos = new FileOutputStream(libCacheFilePath);
fos.write(modelLibByte);
fos.close();
} catch (IOException e) {
Log.e(TAG, "Problem uploading compiled function!", e);
return -1;
}
byte[] modelParams;
try {
modelParams = getBytesFromFile(assetManager, model + "/" + MODEL_PARAM_FILE);
} catch (IOException e) {
Log.e(TAG, "Problem reading params file!", e);
return -1;
}
Log.i(TAG, "creating java tvm device...");
Device tvmDev = EXE_GPU ? Device.opencl() : Device.cpu();
Log.i(TAG, "loading compiled functions...");
Log.i(TAG, libCacheFilePath);
Module modelLib = Module.load(libCacheFilePath);
Log.i(TAG, "getting graph executor create handle...");
Function runtimeCreFun = Function.getFunction("tvm.graph_executor.create");
Log.i(TAG, "creating graph executor...");
Log.i(TAG, "device type: " + tvmDev.deviceType);
Log.i(TAG, "device id: " + tvmDev.deviceId);
TVMValue runtimeCreFunRes = runtimeCreFun.pushArg(modelGraph)
.pushArg(modelLib)
.pushArg(tvmDev.deviceType)
.pushArg(tvmDev.deviceId)
.invoke();
Log.i(TAG, "as module...");
graphExecutorModule = runtimeCreFunRes.asModule();
Log.i(TAG, "getting graph executor load params handle...");
Function loadParamFunc = graphExecutorModule.getFunction("load_params");
Log.i(TAG, "loading params...");
loadParamFunc.pushArg(modelParams).invoke();
modelLib.release();
loadParamFunc.release();
runtimeCreFun.release();
mCurModel = model;
mRunClassifier = true;
return 0;
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tvm.android.androidcamerademo;

import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
public class MainActivity extends AppCompatActivity implements
ActivityCompat.OnRequestPermissionsResultCallback {
private static final int PERMISSIONS_REQUEST_CODE = 1;
private String[] getRequiredPermissions() {
try {
PackageInfo info = getPackageManager()
.getPackageInfo(getPackageName(), PackageManager.GET_PERMISSIONS);
String[] ps = info.requestedPermissions;
if (ps != null && ps.length > 0) {
return ps;
} else {
return new String[0];
}
} catch (Exception e) {
return new String[0];
}
}
private boolean allPermissionsGranted() {
for (String permission : getRequiredPermissions()) {
if (ContextCompat.checkSelfPermission(this, permission)
!= PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (!allPermissionsGranted()) {
requestPermissions(getRequiredPermissions(), PERMISSIONS_REQUEST_CODE);
return;
}
startFragment();
}
private void startFragment() {
getSupportFragmentManager()
.beginTransaction()
.replace(R.id.container, Camera2BasicFragment.newInstance())
.commit();
}
@Override
public void onRequestPermissionsResult(
int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (allPermissionsGranted()) {
startFragment();
} else {
Toast.makeText(this, "Required permissions were not granted; the app may not run.", Toast.LENGTH_SHORT).show();
finish();
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* \file tvm_runtime.h
* \brief Pack all tvm runtime source files
*/
/* Enable custom logging - this will cause TVM to use a custom implementation
* of tvm::runtime::detail::LogMessage. We use this to pass TVM log messages to
* Android logcat.
*/
namespace tvm {
namespace runtime {
namespace detail {
[[noreturn]] void LogFatalImpl(const std::string& file, int lineno, const std::string& message) {
std::string m = file + ":" + std::to_string(lineno) + ": " + message;
__android_log_write(ANDROID_LOG_FATAL, "TVM_RUNTIME", m.c_str());
throw InternalError(file, lineno, message);
}
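// Non-fatal messages are forwarded to logcat by offsetting TVM's integer log level from
// ANDROID_LOG_DEBUG, so more severe TVM levels land on more severe logcat priorities.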
void LogMessageImpl(const std::string& file, int lineno, int level, const std::string& message) {
std::string m = file + ":" + std::to_string(lineno) + ": " + message;
__android_log_write(ANDROID_LOG_DEBUG + level, "TVM_RUNTIME", m.c_str());
}
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Source: https://stackoverflow.com/questions/36212904/yuv-420-888-interpretation-on-samsung-galaxy-s7-camera2
#pragma version(1)
#pragma rs java_package_name(org.apache.tvm.android.androidcamerademo);
#pragma rs_fp_relaxed
int32_t width;
int32_t height;
uint picWidth, uvPixelStride, uvRowStride;
rs_allocation ypsIn, uIn, vIn;
// The LaunchOptions ensure that the Kernel does not enter the padding zone of Y, so yRowStride can be ignored WITHIN the Kernel.
uchar4 __attribute__((kernel)) doConvert(uint32_t x, uint32_t y) {
// index for accessing the uIn's and vIn's
uint uvIndex= uvPixelStride * (x/2) + uvRowStride*(y/2);
// get the y,u,v values
uchar yps= rsGetElementAt_uchar(ypsIn, x, y);
uchar u= rsGetElementAt_uchar(uIn, uvIndex);
uchar v= rsGetElementAt_uchar(vIn, uvIndex);
// calc argb
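// The integer coefficients below are a fixed-point approximation of the usual BT.601
// full-range YUV -> RGB conversion (e.g. 1436/1024 ~= 1.402 for the V contribution to R).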
int4 argb;
argb.r = yps + v * 1436 / 1024 - 179;
argb.g = yps - u * 46549 / 131072 + 44 - v * 93604 / 131072 + 91;
argb.b = yps + u * 1814 / 1024 - 227;
argb.a = 255;
uchar4 out = convert_uchar4(clamp(argb, 0, 255));
return out;
}
import logging
import pathlib
from pathlib import Path
from typing import Union
import os
from os import environ
import json

import tvm
import tvm.relay as relay
from tvm.contrib import utils, ndk, graph_executor as runtime
from tvm.contrib.download import download_testdata, download
target = "llvm -mtriple=arm64-linux-android"
target_host = None
def del_dir(target: Union[Path, str], only_if_empty: bool = False):
target = Path(target).expanduser()
assert target.is_dir()
for p in sorted(target.glob("**/*"), reverse=True):
if not p.exists():
continue
p.chmod(0o666)
if p.is_dir():
p.rmdir()
else:
if only_if_empty:
raise RuntimeError(f"{p.parent} is not empty!")
p.unlink()
target.rmdir()
def get_model(model_name, batch_size=1):
if model_name == "resnet18_v1": |
import mxnet as mx
from mxnet |
import gluon
from mxnet.gluon.model_zoo |
import vision
gluon_model = vision.get_model(model_name, pretrained=True)
img_size = 224
data_shape = (batch_size, 3, img_size, img_size)
net, params = relay.frontend.from_mxnet(gluon_model, {"data": data_shape})
return (net, params)
elif model_name == "mobilenet_v2": |
import keras
from keras.applications.mobilenet_v2 |
import MobileNetV2
keras.backend.clear_session()
weights_url = "".join(
[
"https:
"mobilenet_v2_keras/releases/download/v1.1/",
"mobilenet_v2_weights_tf_dim_ordering_tf_kernels_0.5_224.h5",
]
)
weights_file = "mobilenet_v2_weights.h5"
weights_path = download_testdata(weights_url, weights_file, module="keras")
keras_mobilenet_v2 = MobileNetV2(
alpha=0.5, include_top=True, weights=None, input_shape=(224, 224, 3), classes=1000
)
keras_mobilenet_v2.load_weights(weights_path)
img_size = 224
data_shape = (batch_size, 3, img_size, img_size)
mod, params = relay.frontend.from_keras(keras_mobilenet_v2, {"input_1": data_shape})
return (mod, params)
def main(model_str, output_path):
if output_path.exists():
del_dir(output_path)
output_path.mkdir()
output_path_str = os.fspath(output_path)
print(model_str)
print("getting model...")
net, params = get_model(model_str)
try:
os.mkdir(model_str)
except FileExistsError:
pass
print("building...")
with tvm.transform.PassContext(opt_level=3):
graph, lib, params = relay.build(net, tvm.target.Target(target, target_host), params=params)
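    # relay.build yields three artifacts: the serialized graph JSON, the compiled operator
    # library and the parameter blob; each is dumped below into the per-model assets folder
    # that the Android app loads at runtime.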
print("dumping lib...")
lib.export_library(output_path_str + "/" + "deploy_lib_cpu.so", ndk.create_shared)
print("dumping graph...")
with open(output_path_str + "/" + "deploy_graph.json", "w") as f:
f.write(graph)
print("dumping params...")
with open(output_path_str + "/" + "deploy_param.params", "wb") as f:
f.write(tvm.runtime.save_param_dict(params))
print("dumping labels...")
synset_url = "".join(
[
"https:
"4d0b62f3d01426887599d4f7ede23ee5/raw/",
"596b27d23537e5a1b5751d2b0481ef172f58b539/",
"imagenet1000_clsid_to_human.txt",
]
)
synset_path = output_path_str + "/image_net_labels"
download(synset_url, output_path_str + "/image_net_labels")
with open(synset_path) as fi:
synset = eval(fi.read())
with open(output_path_str + "/image_net_labels.json", "w") as fo:
json.dump(synset, fo, indent=4)
os.remove(synset_path)
if __name__ == "__main__":
if environ.get("TVM_NDK_CC") is None:
raise RuntimeError("Require environment variable TVM_NDK_CC")
models_path = Path().absolute().parent.joinpath("app/src/main/assets/models/")
if not models_path.exists():
models_path.mkdir()
models = {
"mobilenet_v2": models_path.joinpath("mobilenet_v2"),
"resnet18_v1": models_path.joinpath("resnet18_v1"),
}
for model, output_path in models.items():
main(model, output_path)
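
# Illustrative invocation (assumptions: this script is saved as prepare_model.py, and the NDK
# clang++ path is only an example; the code above requires just the TVM_NDK_CC variable):
#   TVM_NDK_CC=$ANDROID_NDK_HOME/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android28-clang++ \
#       python3 prepare_model.py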
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tvm.android.demo;

import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.SystemClock;
import android.provider.MediaStore;
import androidx.core.content.FileProvider;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.Vector;

import org.apache.tvm.Function;
import org.apache.tvm.Module;
import org.apache.tvm.NDArray;
import org.apache.tvm.Device;
import org.apache.tvm.TVMValue;
import org.apache.tvm.TVMType;
public class MainActivity extends AppCompatActivity {
private static final String TAG = MainActivity.class.getSimpleName();
private static final int PERMISSIONS_REQUEST = 100;
private static final int PICTURE_FROM_GALLERY = 101;
private static final int PICTURE_FROM_CAMERA = 102;
private static final int IMAGE_PREVIEW_WIDTH = 960;
private static final int IMAGE_PREVIEW_HEIGHT = 720;
private static final int OUTPUT_INDEX = 0;
private static final int IMG_CHANNEL = 3;
private static final String INPUT_NAME = "data";
private static final boolean EXE_GPU = false;
private static final int MODEL_INPUT_SIZE = 224;
private static final String MODEL_CL_LIB_FILE = "file:
private static final String MODEL_CPU_LIB_FILE = "file:
private static final String MODEL_GRAPH_FILE = "file:
private static final String MODEL_PARAM_FILE = "file:
private static final String MODEL_LABEL_FILE = "file:
private Uri mCameraImageUri;
private ImageView mImageView;
private TextView mResultView;
private AssetManager assetManager;
private Module graphExecutorModule;
private Vector<String> labels = new Vector<String>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
assetManager = getAssets();
mImageView = (ImageView) findViewById(R.id.imageView);
mResultView = (TextView) findViewById(R.id.resultTextView);
findViewById(R.id.btnPickImage).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showPictureDialog();
}
});
if (hasPermission()) {
new LoadModleAsyncTask().execute();
} else {
requestPermission();
}
}
/*
Load precompiled model on TVM graph executor and init the system.
*/
private class LoadModleAsyncTask extends AsyncTask<Void, Void, Integer> {
ProgressDialog dialog = new ProgressDialog(MainActivity.this);
@Override
protected Integer doInBackground(Void... args) {
String lableFilename = MODEL_LABEL_FILE.split("file:
Log.i(TAG, "Reading synset name from: " + lableFilename);
try {
String labelsContent = new String(getBytesFromFile(assetManager, lableFilename));
for (String line : labelsContent.split("\\r?\\n")) {
labels.add(line);
}
} catch (IOException e) {
Log.e(TAG, "Problem reading synset name file!" + e);
return -1;
}
String modelGraph = null;
String graphFilename = MODEL_GRAPH_FILE.split("file:
Log.i(TAG, "Reading json graph from: " + graphFilename);
try {
modelGraph = new String(getBytesFromFile(assetManager, graphFilename));
} catch (IOException e) {
Log.e(TAG, "Problem reading json graph file!" + e);
return -1;
}
String libCacheFilePath = null;
String libFilename = EXE_GPU ? MODEL_CL_LIB_FILE.split("file:
MODEL_CPU_LIB_FILE.split("file:
Log.i(TAG, "Uploading compiled function to cache folder");
try {
libCacheFilePath = getTempLibFilePath(libFilename);
byte[] modelLibByte = getBytesFromFile(assetManager, libFilename);
FileOutputStream fos = new FileOutputStream(libCacheFilePath);
fos.write(modelLibByte);
fos.close();
} catch (IOException e) {
Log.e(TAG, "Problem uploading compiled function!" + e); |
return -1;
}
byte[] modelParams = null;
String paramFilename = MODEL_PARAM_FILE.split("file:
try {
modelParams = getBytesFromFile(assetManager, paramFilename);
} catch (IOException e) {
Log.e(TAG, "Problem reading params file!" + e);
return -1;
}
Device tvmDev = EXE_GPU ? Device.opencl() : Device.cpu();
Module modelLib = Module.load(libCacheFilePath);
Function runtimeCreFun = Function.getFunction("tvm.graph_executor.create");
TVMValue runtimeCreFunRes = runtimeCreFun.pushArg(modelGraph)
.pushArg(modelLib)
.pushArg(tvmDev.deviceType)
.pushArg(tvmDev.deviceId)
.invoke();
graphExecutorModule = runtimeCreFunRes.asModule();
Function loadParamFunc = graphExecutorModule.getFunction("load_params");
loadParamFunc.pushArg(modelParams).invoke();
modelLib.release();
loadParamFunc.release();
runtimeCreFun.release();
return 0;
}
@Override
protected void onPreExecute() {
dialog.setCancelable(false);
dialog.setMessage("Loading Model...");
dialog.show();
super.onPreExecute();
}
@Override
protected void onPostExecute(Integer status) {
if (dialog != null && dialog.isShowing()) {
dialog.dismiss();
}
if (status != 0) {
showDialog("Error", "Fail to initialized model, check compiled model");
}
}
}
/*
Execute prediction for processed decode input bitmap image content on TVM graph executor.
*/
private class ModelRunAsyncTask extends AsyncTask<Bitmap, Void, Integer> {
ProgressDialog dialog = new ProgressDialog(MainActivity.this);
@Override
protected Integer doInBackground(Bitmap... bitmaps) {
if (null != graphExecutorModule) {
int count = bitmaps.length;
for (int i = 0 ; i < count ; i++) {
long processingTimeMs = SystemClock.uptimeMillis();
Log.i(TAG, "Decode JPEG image content");
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmaps[i].compress(Bitmap.CompressFormat.JPEG,100,stream);
byte[] byteArray = stream.toByteArray();
Bitmap imageBitmap = BitmapFactory.decodeByteArray(byteArray, 0, byteArray.length);
Bitmap cropImageBitmap = Bitmap.createBitmap(MODEL_INPUT_SIZE, MODEL_INPUT_SIZE, Bitmap.Config.ARGB_8888);
Matrix frameToCropTransform = getTransformationMatrix(imageBitmap.getWidth(), imageBitmap.getHeight(),
MODEL_INPUT_SIZE, MODEL_INPUT_SIZE, 0, true);
Canvas canvas = new Canvas(cropImageBitmap);
canvas.drawBitmap(imageBitmap, frameToCropTransform, null);
int[] pixelValues = new int[MODEL_INPUT_SIZE * MODEL_INPUT_SIZE];
float[] imgRgbValues = new float[MODEL_INPUT_SIZE * MODEL_INPUT_SIZE * IMG_CHANNEL];
float[] imgRgbTranValues = new float[MODEL_INPUT_SIZE * MODEL_INPUT_SIZE * IMG_CHANNEL];
cropImageBitmap.getPixels(pixelValues, 0, MODEL_INPUT_SIZE, 0, 0, MODEL_INPUT_SIZE, MODEL_INPUT_SIZE);
for (int j = 0; j < pixelValues.length; ++j) {
imgRgbValues[j * 3 + 0] = ((pixelValues[j] >> 16) & 0xFF) / 255.0f;
imgRgbValues[j * 3 + 1] = ((pixelValues[j] >> 8) & 0xFF) / 255.0f;
imgRgbValues[j * 3 + 2] = (pixelValues[j] & 0xFF) / 255.0f;
}
for (int k = 0; k < IMG_CHANNEL; ++k) {
for (int l = 0; l < MODEL_INPUT_SIZE; ++l) {
for (int m = 0; m < MODEL_INPUT_SIZE; ++m) {
int dst_index = m + MODEL_INPUT_SIZE*l + MODEL_INPUT_SIZE*MODEL_INPUT_SIZE*k;
int src_index = k + IMG_CHANNEL*m + IMG_CHANNEL*MODEL_INPUT_SIZE*l;
imgRgbTranValues[dst_index] = imgRgbValues[src_index];
}
}
}
Log.i(TAG, "set input data");
NDArray inputNdArray = NDArray.empty(new long[]{1, IMG_CHANNEL, MODEL_INPUT_SIZE, MODEL_INPUT_SIZE}, new TVMType("float32"));
inputNdArray.copyFrom(imgRgbTranValues);
Function setInputFunc = graphExecutorModule.getFunction("set_input");
setInputFunc.pushArg(INPUT_NAME).pushArg(inputNdArray).invoke();
inputNdArray.release();
setInputFunc.release();
Log.i(TAG, "run function on target");
Function runFunc = graphExecutorModule.getFunction("run");
runFunc.invoke();
runFunc.release();
Log.i(TAG, "get output data");
NDArray outputNdArray = NDArray.empty(new long[]{1, 1000}, new TVMType("float32"));
Function getOutputFunc = graphExecutorModule.getFunction("get_output");
getOutputFunc.pushArg(OUTPUT_INDEX).pushArg(outputNdArray).invoke();
float[] output = outputNdArray.asFloatArray();
outputNdArray.release();
getOutputFunc.release();
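// The output buffer holds the 1 x 1000 class scores produced by the model; the loop below
// takes the arg-max index as the predicted ImageNet label.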
if (null != output) {
int maxPosition = -1;
float maxValue = 0;
for (int j = 0; j < output.length; ++j) {
if (output[j] > maxValue) {
maxValue = output[j];
maxPosition = j;
}
}
processingTimeMs = SystemClock.uptimeMillis() - processingTimeMs;
String label = "Prediction Result : ";
label += labels.size() > maxPosition ? labels.get(maxPosition) : "unknown";
label += "\nPrediction Time : " + processingTimeMs + "ms";
mResultView.setText(label);
}
Log.i(TAG, "prediction finished");
}
return 0;
}
return -1;
}
@Override
protected void onPreExecute() {
dialog.setCancelable(false);
dialog.setMessage("Prediction running on image...");
dialog.show();
super.onPreExecute();
}
@Override
protected void onPostExecute(Integer status) {
if (dialog != null && dialog.isShowing()) {
dialog.dismiss();
}
if (status != 0) {
showDialog("Error", "Fail to predict image, GraphExecutor exception");
}
}
}
@Override
protected void onDestroy() {
if (null != graphExecutorModule)
graphExecutorModule.release();
super.onDestroy();
}
/**
* Read file from assets and return byte array.
*
* @param assets The asset manager to be used to load assets.
* @param fileName The filepath of read file.
* @return byte[] file content
* @throws IOException
*/
private byte[] getBytesFromFile(AssetManager assets, String fileName) throws IOException {
InputStream is = assets.open(fileName);
int length = is.available();
byte[] bytes = new byte[length];
int offset = 0;
int numRead = 0;
try {
while (offset < bytes.length
&& (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
offset += numRead;
}
} finally {
is.close();
}
if (offset < bytes.length) {
throw new IOException("Could not completely read file " + fileName);
}
return bytes;
}
/**
* Dialog show pick option for select image from Gallery or Camera.
*/
private void showPictureDialog(){
AlertDialog.Builder pictureDialog = new AlertDialog.Builder(this);
pictureDialog.setTitle("Select Action");
String[] pictureDialogItems = {
"Select photo from gallery",
"Capture photo from camera" };
pictureDialog.setItems(pictureDialogItems,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case 0:
choosePhotoFromGallery();
break;
case 1:
takePhotoFromCamera();
break;
}
}
});
pictureDialog.show();
}
/**
* Request to pick image from Gallery.
*/
public void choosePhotoFromGallery() {
Intent galleryIntent = new Intent(Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(galleryIntent, PICTURE_FROM_GALLERY);
}
/**
* Request to capture image from Camera.
*/
private void takePhotoFromCamera() {
Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
mCameraImageUri = Uri.fromFile(createImageFile());
} else {
File file = new File(createImageFile().getPath());
mCameraImageUri = FileProvider.getUriForFile(getApplicationContext(), getApplicationContext().getPackageName() + ".provider", file);
}
intent.putExtra(MediaStore.EXTRA_OUTPUT, mCameraImageUri);
startActivityForResult(intent, PICTURE_FROM_CAMERA);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == this.RESULT_CANCELED) {
return;
}
Uri contentURI = null;
if (requestCode == PICTURE_FROM_GALLERY) {
if (data != null) {
contentURI = data.getData();
}
} else if (requestCode == PICTURE_FROM_CAMERA) {
contentURI = mCameraImageUri;
}
if (null != contentURI) {
try {
Bitmap bitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), contentURI);
Bitmap scaled = Bitmap.createScaledBitmap(bitmap, IMAGE_PREVIEW_HEIGHT, IMAGE_PREVIEW_WIDTH, true);
mImageView.setImageBitmap(scaled);
new ModelRunAsyncTask().execute(scaled);
} catch (IOException e) {
e.printStackTrace();
}
}
}
/**
* Get application cache path where to place compiled functions.
*
* @param fileName library file name.
* @return String application cache folder path
* @throws IOException
*/
private final String getTempLibFilePath(String fileName) throws IOException {
File tempDir = File.createTempFile("tvm4j_demo_", "");
if (!tempDir.delete() || !tempDir.mkdir()) {
throw new IOException("Couldn't create directory " + tempDir.getAbsolutePath());
}
return (tempDir + File.separator + fileName);
}
/**
* Create image file under storage where camera application save captured image.
*
* @return File image file under sdcard where camera can save image
*/
private File createImageFile() {
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(new Date());
String imageFileName = "JPEG_" + timeStamp + "_";
File storageDir = Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES);
try {
File image = File.createTempFile(
imageFileName,
".jpg",
storageDir
);
return image;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
/**
* Show dialog to user.
*
* @param title dialog display title
* @param msg dialog display message
*/
private void showDialog(String title, String msg) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(title);
builder.setMessage(msg);
builder.setCancelable(true);
builder.setNeutralButton(android.R.string.ok,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
finish();
}
});
builder.create().show();
}
@Override
public void onRequestPermissionsResult (final int requestCode, final String[] permissions, final int[] grantResults){
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == PERMISSIONS_REQUEST) {
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED
&& grantResults[1] == PackageManager.PERMISSION_GRANTED) {
new LoadModleAsyncTask().execute();
} else {
requestPermission();
}
}
}
/**
* Whether application has required mandatory permissions to run.
*/
private boolean hasPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
return checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED &&
checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
}
/**
* Request required mandatory permission for application to run.
*/
private void requestPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA) ||
shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
Toast.makeText(this,
"Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
}
requestPermissions(new String[] {Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, PERMISSIONS_REQUEST);
}
}
/**
* Returns a transformation matrix from one reference frame into another.
* Handles cropping (if maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to another.
* Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
* cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(
final int srcWidth,
final int srcHeight,
final int dstWidth,
final int dstHeight,
final int applyRotation,
final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
if (applyRotation % 90 != 0) {
Log.w(TAG, "Rotation of " + applyRotation + " % 90 != 0");
}
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
matrix.postRotate(applyRotation);
}
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* \file tvm_runtime.h
* \brief Pack all tvm runtime source files
*/
#include <sys/stat.h>
#include <fstream>
#define DMLC_USE_LOGGING_LIBRARY <tvm/runtime/logging.h>
#define TVM_USE_LIBBACKTRACE 0
#include "../src/runtime/c_runtime_api.cc"
#include "../src/runtime/cpu_device_api.cc"
#include "../src/runtime/dso_library.cc"
#include "../src/runtime/file_utils.cc"
#include "../src/runtime/graph_executor/graph_executor.cc"
#include "../src/runtime/library_module.cc"
#include "../src/runtime/logging.cc"
#include "../src/runtime/module.cc"
#include "../src/runtime/ndarray.cc"
#include "../src/runtime/object.cc"
#include "../src/runtime/registry.cc"
#include "../src/runtime/system_library.cc"
#include "../src/runtime/thread_pool.cc"
#include "../src/runtime/threading_backend.cc"
#include "../src/runtime/workspace_pool.cc"
#ifdef TVM_OPENCL_RUNTIME
#include "../src/runtime/opencl/opencl_device_api.cc"
#include "../src/runtime/opencl/opencl_module.cc"
#endif
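// Note: TVM_OPENCL_RUNTIME is assumed to be supplied by the app's native build configuration
// (e.g. a CMake/ndk-build define) when the OpenCL-enabled model library is used.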
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tvm.tvmrpc;

import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import android.widget.CompoundButton;
import android.widget.EditText;
import androidx.appcompat.widget.SwitchCompat;