I am trying to implement an app that takes a picture with the native camera app and displays the result in an ImageView. The behavior is very inconsistent: when I run the app, it sometimes shows the image in the ImageView, and other times it doesn't show anything at all.
The code I am using is as follows.

This function opens the camera app:
// Launches the native camera app, asking it to write the full-size photo to a
// file we create, so onActivityResult can load it from mCurrentPhotoPath.
Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
// Only proceed if some activity can actually handle the capture intent.
if (cameraIntent.resolveActivity(getPackageManager()) != null) {
    File photoFile = null;
    try {
        photoFile = createImageFile();
    } catch (IOException ex) {
        // Log the exception itself, not just a static tag: Log.i(TAG, "IOException")
        // silently discarded the message and stack trace, making failures
        // (e.g. missing storage permission, unmounted storage) impossible to diagnose.
        Log.e(TAG, "Failed to create image file for camera capture", ex);
    }
    // If the file could not be created we skip launching the camera entirely,
    // since there would be nowhere to save the photo.
    if (photoFile != null) {
        // NOTE(review): Uri.fromFile() throws FileUriExposedException on
        // API 24+ when the URI is handed to another app. If targetSdkVersion
        // is >= 24 this must be replaced with FileProvider.getUriForFile()
        // plus a <provider> entry in the manifest — confirm the target SDK.
        cameraIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(photoFile));
        startActivityForResult(cameraIntent, REQUEST_IMAGE_CAPTURE);
    }
}
This function is used to save the image to the device's external storage:
// Builds a collision-resistant file name from the current time,
// e.g. "JPEG_20240131_235959_".
// NOTE(review): SimpleDateFormat without an explicit Locale uses the device
// default, which can produce non-ASCII digits in some locales — consider
// new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "JPEG_" + timeStamp + "_";
// Shared public Pictures directory; on API 19+ writing here requires the
// WRITE_EXTERNAL_STORAGE permission — presumably granted; verify at runtime
// on API 23+ or the file creation below will throw.
File storageDir = Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES);
// createTempFile appends a random suffix before ".jpg" to guarantee uniqueness.
File image = File.createTempFile(
imageFileName,
".jpg",
storageDir
);
// NOTE(review): "file:" + absolute path yields "file:/storage/..." (one slash),
// not the canonical "file:///..." form produced by Uri.fromFile(). Consumers
// that Uri.parse() this string may resolve it inconsistently — this is a
// likely cause of the intermittent failures; prefer storing the plain path
// or Uri.fromFile(image).toString().
mCurrentPhotoPath = "file:" + image.getAbsolutePath();
return image;
/**
 * Receives the result of the camera capture and displays the saved photo.
 *
 * Bug fixed: the original code checked {@code mImageBitmap == null} BEFORE
 * ever assigning it. On the first capture the field was still null, so the
 * "adjust camera Resolution" toast appeared and no image was loaded; on later
 * captures the PREVIOUS bitmap passed the check and the STALE image was shown
 * just before being overwritten — exactly the inconsistent behavior observed.
 * The fix is to load the bitmap from {@code mCurrentPhotoPath} first, and
 * only then decide whether decoding succeeded.
 */
protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
    // Only handle our own capture request, and only when the user took a photo
    // (RESULT_CANCELED arrives if they backed out of the camera).
    if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
        try {
            // Load the full-size photo written by the camera app to the file
            // we supplied via EXTRA_OUTPUT.
            mImageBitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), Uri.parse(mCurrentPhotoPath));
            if (mImageBitmap == null) {
                Toast.makeText(getApplicationContext(), "Unable to Process. Please adjust camera Resolution", Toast.LENGTH_SHORT).show();
            } else {
                imageView.setImageBitmap(mImageBitmap);
            }
        } catch (IOException e) {
            // Preserve the stack trace in the app log instead of stderr-only
            // printStackTrace(), which is easy to miss on a device.
            Log.e(TAG, "Failed to load captured image from " + mCurrentPhotoPath, e);
        }
    }
}