Android OpenCV Example - Object Detection Test Using SURF
This is an example of matching an object image against a scene image on Android using SURF.
Because SURF lives in the contrib modules (xfeatures2d) and is not included in the prebuilt OpenCV Android SDK, you first need to build OpenCV for Android yourself by following this post:
How to build OpenCV for Android (including contrib)
https://webnautes.tistory.com/1268
Then work through the steps below.
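Before starting, it is worth sanity-checking that the contrib build actually loads on your device. A minimal sketch, assuming the library name opencv_java4 that this post uses (put it in any Activity and watch logcat):

static {
    try {
        System.loadLibrary("opencv_java4"); // the contrib-enabled build from the post above
        android.util.Log.d("opencv", "OpenCV native library loaded");
    } catch (UnsatisfiedLinkError e) {
        android.util.Log.e("opencv", "failed to load libopencv_java4.so", e);
    }
}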
1. styles.xml
<!-- No Title Bar: add these two items to the app's existing AppTheme style in res/values/styles.xml (the parent theme below is the Android Studio template default) -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<item name="windowActionBar">false</item>
<item name="windowNoTitle">true</item>
</style>
2. activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent">
<ImageView
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="0.45"
android:id="@+id/imageViewObject"
app:srcCompat="@drawable/ic_image"/>
<ImageView
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="0.45"
android:id="@+id/imageViewScene"
app:srcCompat="@drawable/ic_image"/>
<Button
android:id="@+id/button"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="0.1"
android:text="Surf" />
</LinearLayout>
3. image.svg
Import a placeholder image as a vector asset (New > Vector Asset in Android Studio) and name it ic_image, so that the @drawable/ic_image referenced in the layout above resolves.
4. AndroidManifest.xml
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
On older Android versions this also covers reading the gallery images, since READ_EXTERNAL_STORAGE and WRITE_EXTERNAL_STORAGE belong to the same permission group; the runtime-permission code in MainActivity requests it on API 23 and up.
5. MainActivity.java
If your project uses androidx, change the following three imports (androidx projects also need android.useAndroidX=true in gradle.properties). With androidx, this example may only work after building against the newly released OpenCV 4.1.1.
Before:
import android.support.annotation.NonNull;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
After:
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Build;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
static {
System.loadLibrary("opencv_java4");
System.loadLibrary("native-lib");
}
ImageView imageViewObject;
ImageView imageViewScene;
private Mat img_object;
private Mat img_scene;
private static final String TAG = "opencv";
private final int GET_GALLERY_IMAGE1 = 200;
private final int GET_GALLERY_IMAGE2 = 300;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
imageViewObject = (ImageView)findViewById(R.id.imageViewObject);
imageViewScene = (ImageView)findViewById(R.id.imageViewScene);
Button button = (Button)findViewById(R.id.button);
button.setOnClickListener(new View.OnClickListener(){
public void onClick(View v){
imageprocess_and_showResult();
}
});
imageViewObject.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
Intent intent = new Intent(Intent.ACTION_PICK);
intent.setData(android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
intent.setType("image/*");
startActivityForResult(intent, GET_GALLERY_IMAGE1);
}
});
imageViewScene.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
Intent intent = new Intent(Intent.ACTION_PICK);
intent.setData(android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
intent.setType("image/*");
startActivityForResult(intent, GET_GALLERY_IMAGE2);
}
});
if (!hasPermissions(PERMISSIONS)) { // check whether the permissions were already granted
requestNecessaryPermissions(PERMISSIONS); // if not, ask the user for them
}
}
public native void imageprocessing(long objectImage, long sceneImage);
private void imageprocess_and_showResult() {
// both images must be selected before calling into native code,
// otherwise getNativeObjAddr() below would throw a NullPointerException
if (img_object == null || img_scene == null) {
Log.d(TAG, "select an object image and a scene image first");
return;
}
Log.d("native-lib", "start");
imageprocessing(img_object.getNativeObjAddr(), img_scene.getNativeObjAddr());
Log.d("native-lib", "end");
Bitmap bitmapOutput = Bitmap.createBitmap(img_scene.cols(), img_scene.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(img_scene, bitmapOutput);
imageViewScene.setImageBitmap(bitmapOutput);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
// the picker may have been canceled, in which case data would be null
if (resultCode != RESULT_OK || data == null || data.getData() == null) return;
if ( requestCode == GET_GALLERY_IMAGE1){
Uri uri = data.getData();
try {
String path = getRealPathFromURI(uri);
int orientation = getOrientationOfImage(path); // requires the runtime storage permission
Bitmap temp = MediaStore.Images.Media.getBitmap(getContentResolver(), uri);
Bitmap bitmap = getRotatedBitmap(temp, orientation);
imageViewObject.setImageBitmap(bitmap);
img_object = new Mat();
Bitmap bmp32 = bitmap.copy(Bitmap.Config.ARGB_8888, true);
Utils.bitmapToMat(bmp32, img_object);
} catch (Exception e) {
e.printStackTrace();
}
}else if ( requestCode == GET_GALLERY_IMAGE2){
Uri uri = data.getData();
try {
String path = getRealPathFromURI(uri);
int orientation = getOrientationOfImage(path); // requires the runtime storage permission
Bitmap temp = MediaStore.Images.Media.getBitmap(getContentResolver(), uri);
Bitmap bitmap = getRotatedBitmap(temp, orientation);
imageViewScene.setImageBitmap(bitmap);
img_scene = new Mat();
Bitmap bmp32 = bitmap.copy(Bitmap.Config.ARGB_8888, true);
Utils.bitmapToMat(bmp32, img_scene);
} catch (Exception e) {
e.printStackTrace();
}
}
}
private String getRealPathFromURI(Uri contentUri) {
String[] proj = {MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver().query(contentUri, proj, null, null, null);
cursor.moveToFirst();
int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
String path = cursor.getString(column_index);
cursor.close(); // close the cursor to avoid a resource leak
return path;
}
// Source - http://snowdeer.github.io/android/2016/02/02/android-image-rotation/
public int getOrientationOfImage(String filepath) {
ExifInterface exif = null;
try {
exif = new ExifInterface(filepath);
} catch (IOException e) {
Log.d("@@@", e.toString());
return -1;
}
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, -1);
if (orientation != -1) {
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
return 90;
case ExifInterface.ORIENTATION_ROTATE_180:
return 180;
case ExifInterface.ORIENTATION_ROTATE_270:
return 270;
}
}
return 0;
}
public Bitmap getRotatedBitmap(Bitmap bitmap, int degrees) throws Exception {
if(bitmap == null) return null;
if (degrees == 0) return bitmap;
Matrix m = new Matrix();
m.setRotate(degrees, (float) bitmap.getWidth() / 2, (float) bitmap.getHeight() / 2);
return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), m, true);
}
// permission handling code
static final int PERMISSION_REQUEST_CODE = 1;
String[] PERMISSIONS = {"android.permission.WRITE_EXTERNAL_STORAGE"};
private boolean hasPermissions(String[] permissions) {
int ret = 0;
// check the grant status of every permission in the array
for (String perms : permissions){
ret = checkCallingOrSelfPermission(perms);
if (!(ret == PackageManager.PERMISSION_GRANTED)){
// at least one permission has not been granted
return false;
}
}
// all permissions granted
return true;
}
private void requestNecessaryPermissions(String[] permissions) {
// request runtime permissions on Marshmallow (API 23) and above
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(permissions, PERMISSION_REQUEST_CODE);
}
}
@Override
public void onRequestPermissionsResult(int permsRequestCode, @NonNull String[] permissions, @NonNull int[] grantResults){
switch(permsRequestCode){
case PERMISSION_REQUEST_CODE:
if (grantResults.length > 0) {
boolean writeAccepted = grantResults[0] == PackageManager.PERMISSION_GRANTED;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (!writeAccepted )
{
showDialogforPermission("You must grant the permission to run this app.");
return;
}
}
}
break;
}
}
private void showDialogforPermission(String msg) {
final AlertDialog.Builder myDialog = new AlertDialog.Builder( MainActivity.this);
myDialog.setTitle("Notice");
myDialog.setMessage(msg);
myDialog.setCancelable(false);
myDialog.setPositiveButton("Yes", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface arg0, int arg1) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(PERMISSIONS, PERMISSION_REQUEST_CODE);
}
}
});
myDialog.setNegativeButton("No", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface arg0, int arg1) {
finish();
}
});
myDialog.show();
}
}
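A caveat on the gallery-loading code above: getRealPathFromURI depends on the MediaStore DATA column, which is deprecated and unreliable on recent Android versions (and is the reason EXIF reading needs the storage permission). On API 24 and above, the orientation can be read directly from the content Uri instead. A minimal sketch; the method name getOrientationFromUri is my own, not part of the original code:

// Hypothetical alternative to getRealPathFromURI() + getOrientationOfImage():
// reads the EXIF orientation straight from the ContentResolver stream.
// The ExifInterface(InputStream) constructor requires API 24+.
public int getOrientationFromUri(Uri uri) {
    try (java.io.InputStream in = getContentResolver().openInputStream(uri)) {
        ExifInterface exif = new ExifInterface(in);
        int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                ExifInterface.ORIENTATION_NORMAL);
        switch (orientation) {
            case ExifInterface.ORIENTATION_ROTATE_90: return 90;
            case ExifInterface.ORIENTATION_ROTATE_180: return 180;
            case ExifInterface.ORIENTATION_ROTATE_270: return 270;
            default: return 0;
        }
    } catch (IOException e) {
        Log.d(TAG, e.toString());
        return 0;
    }
}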
6-1. native-lib.cpp (first half of the file: includes and helper code)
#include "opencv2/core.hpp"
#include "opencv2/core/utility.hpp"
#include "opencv2/core/ocl.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/xfeatures2d.hpp"
#include <iostream>
#include <jni.h>
#include <android/log.h>
using namespace cv;
using namespace cv::xfeatures2d;
using namespace std;
const int LOOP_NUM = 10;
const int GOOD_PTS_MAX = 50;
const float GOOD_PORTION = 0.15f;
int64 work_begin = 0;
int64 work_end = 0;
static void workBegin()
{
work_begin = getTickCount();
}
static void workEnd()
{
work_end = getTickCount() - work_begin;
}
static double getTime()
{
return work_end / ((double)getTickFrequency()) * 1000.;
}
struct SURFDetector
{
Ptr<Feature2D> surf;
SURFDetector(double hessian = 800.0)
{
surf = SURF::create(hessian);
}
template<class T>
void operator()(const T& in, const T& mask, std::vector<cv::KeyPoint>& pts, T& descriptors, bool useProvided = false)
{
surf->detectAndCompute(in, mask, pts, descriptors, useProvided);
}
};
template<class KPMatcher>
struct SURFMatcher
{
KPMatcher matcher;
template<class T>
void match(const T& in1, const T& in2, std::vector<cv::DMatch>& matches)
{
matcher.match(in1, in2, matches);
}
};
static Mat drawGoodMatches(
const Mat& img1,
const Mat& img2,
const std::vector<KeyPoint>& keypoints1,
const std::vector<KeyPoint>& keypoints2,
std::vector<DMatch>& matches,
std::vector<Point2f>& scene_corners_
)
{
//-- Sort matches by distance and keep the best GOOD_PORTION (15%) of them, capped at GOOD_PTS_MAX
std::sort(matches.begin(), matches.end());
std::vector< DMatch > good_matches;
double minDist = matches.front().distance;
double maxDist = matches.back().distance;
const int ptsPairs = std::min(GOOD_PTS_MAX, (int)(matches.size() * GOOD_PORTION));
for (int i = 0; i < ptsPairs; i++)
{
good_matches.push_back(matches[i]);
}
// note: std::cout output is not visible in logcat; these lines are only useful when run as a desktop sample
std::cout << "\nMax distance: " << maxDist << std::endl;
std::cout << "Min distance: " << minDist << std::endl;
std::cout << "Calculating homography using " << ptsPairs << " point pairs." << std::endl;
// drawing the results
Mat img_matches;
drawMatches(img1, keypoints1, img2, keypoints2,
good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
std::vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
//-- Localize the object
std::vector<Point2f> obj;
std::vector<Point2f> scene;
for (size_t i = 0; i < good_matches.size(); i++)
{
//-- Get the keypoints from the good matches
obj.push_back(keypoints1[good_matches[i].queryIdx].pt);
scene.push_back(keypoints2[good_matches[i].trainIdx].pt);
}
//-- Get the corners from the image_1 ( the object to be "detected" )
std::vector<Point2f> obj_corners(4);
obj_corners[0] = Point(0, 0);
obj_corners[1] = Point(img1.cols, 0);
obj_corners[2] = Point(img1.cols, img1.rows);
obj_corners[3] = Point(0, img1.rows);
std::vector<Point2f> scene_corners(4);
Mat H = findHomography(obj, scene, RANSAC);
perspectiveTransform(obj_corners, scene_corners, H);
scene_corners_ = scene_corners;
//-- Draw lines between the corners (the mapped object in the scene - image_2 )
line(img_matches,
scene_corners[0] + Point2f((float)img1.cols, 0), scene_corners[1] + Point2f((float)img1.cols, 0),
Scalar(0, 255, 0), 2, LINE_AA);
line(img_matches,
scene_corners[1] + Point2f((float)img1.cols, 0), scene_corners[2] + Point2f((float)img1.cols, 0),
Scalar(0, 255, 0), 2, LINE_AA);
line(img_matches,
scene_corners[2] + Point2f((float)img1.cols, 0), scene_corners[3] + Point2f((float)img1.cols, 0),
Scalar(0, 255, 0), 2, LINE_AA);
line(img_matches,
scene_corners[3] + Point2f((float)img1.cols, 0), scene_corners[0] + Point2f((float)img1.cols, 0),
Scalar(0, 255, 0), 2, LINE_AA);
return img_matches;
}
// Resizes img_src so its width is at most resize_width and returns the scale
// factor that was actually applied (1.0 if the image was left untouched).
// The original version returned resize_width / cols even when no resizing was
// done, which would misplace the box drawn in the scene image later on.
float resize(UMat img_src, UMat &img_resize, int resize_width){
    if (img_src.cols > resize_width) {
        float scale = resize_width / (float)img_src.cols;
        int new_height = cvRound(img_src.rows * scale);
        resize(img_src, img_resize, Size(resize_width, new_height));
        return scale;
    }
    img_resize = img_src;
    return 1.f; // no resizing was needed
}
6-2. native-lib.cpp (second half of the same file: the JNI entry point called from MainActivity). The Java_... function name below assumes a package name of com.example.useopencvwithcmake; adjust the prefix to your app's actual package, or the native method will not be found at runtime.
extern "C" JNIEXPORT void JNICALL
Java_com_example_useopencvwithcmake_MainActivity_imageprocessing(JNIEnv *env, jobject instance, jlong objectImage, jlong sceneImage)
{
ocl::setUseOpenCL(true);
UMat img1, img2;
Mat &img_object = *(Mat *) objectImage;
Mat &img_scene = *(Mat *) sceneImage;
img_object.copyTo(img1);
img_scene.copyTo(img2);
float resizeRatio = resize(img2, img2, 800);
resize(img1, img1, 800);
cvtColor( img1, img1, COLOR_RGBA2GRAY);
cvtColor( img2, img2, COLOR_RGBA2GRAY);
double surf_time = 0.;
//declare input/output
std::vector<KeyPoint> keypoints1, keypoints2;
std::vector<DMatch> matches;
UMat _descriptors1, _descriptors2;
Mat descriptors1 = _descriptors1.getMat(ACCESS_RW),
descriptors2 = _descriptors2.getMat(ACCESS_RW);
//instantiate detectors/matchers
SURFDetector surf;
SURFMatcher<BFMatcher> matcher;
//-- start of timing section
for (int i = 0; i <= LOOP_NUM; i++)
{
if (i == 1) workBegin(); // iteration 0 is a warm-up run and is excluded from the timing
surf(img1.getMat(ACCESS_READ), Mat(), keypoints1, descriptors1);
surf(img2.getMat(ACCESS_READ), Mat(), keypoints2, descriptors2);
matcher.match(descriptors1, descriptors2, matches);
}
workEnd();
__android_log_print(ANDROID_LOG_DEBUG, "native-lib :: ",
"%d keypoints on object image", (int)keypoints1.size()); // cast: size() returns size_t, %d expects int
__android_log_print(ANDROID_LOG_DEBUG, "native-lib :: ",
"%d keypoints on scene image", (int)keypoints2.size());
surf_time = getTime();
__android_log_print(ANDROID_LOG_DEBUG, "native-lib :: ",
"SURF run time: %f ms", surf_time / LOOP_NUM);
std::vector<Point2f> corner;
// drawGoodMatches returns a side-by-side visualization and fills in the corners of
// the detected object; only the corners are used here, to draw the bounding box
// directly on the original (full-resolution) scene image
Mat img_matches = drawGoodMatches(img1.getMat(ACCESS_READ), img2.getMat(ACCESS_READ), keypoints1, keypoints2, matches, corner);
line(img_scene, Point2f(corner[0].x/resizeRatio, corner[0].y/resizeRatio), Point2f(corner[1].x/resizeRatio, corner[1].y/resizeRatio), Scalar(0, 255, 0, 255), 10);
line(img_scene, Point2f(corner[1].x/resizeRatio, corner[1].y/resizeRatio), Point2f(corner[2].x/resizeRatio, corner[2].y/resizeRatio), Scalar(0, 255, 0, 255), 10);
line(img_scene, Point2f(corner[2].x/resizeRatio, corner[2].y/resizeRatio), Point2f(corner[3].x/resizeRatio, corner[3].y/resizeRatio), Scalar(0, 255, 0, 255), 10);
line(img_scene, Point2f(corner[3].x/resizeRatio, corner[3].y/resizeRatio), Point2f(corner[0].x/resizeRatio, corner[0].y/resizeRatio), Scalar(0, 255, 0, 255), 10);
__android_log_print(ANDROID_LOG_DEBUG, "native-lib :: ", "draw box %f %f", corner[0].x/resizeRatio, corner[0].y/resizeRatio );
__android_log_print(ANDROID_LOG_DEBUG, "native-lib :: ", "draw box %f %f", corner[1].x/resizeRatio, corner[1].y/resizeRatio );
__android_log_print(ANDROID_LOG_DEBUG, "native-lib :: ", "draw box %f %f", corner[2].x/resizeRatio, corner[2].y/resizeRatio );
__android_log_print(ANDROID_LOG_DEBUG, "native-lib :: ", "draw box %f %f", corner[3].x/resizeRatio, corner[3].y/resizeRatio );
}
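Incidentally, since the contrib build also generates Java bindings for the xfeatures2d module, the detection and matching part can be done without writing any JNI code of your own. A minimal sketch using the OpenCV Java API, assuming img1 and img2 are grayscale Mats (the hessian threshold of 800 matches the native code above; the remaining SURF parameters are the library defaults):

import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.BFMatcher;
import org.opencv.xfeatures2d.SURF;

// detect SURF keypoints/descriptors on both images and brute-force match them
SURF surf = SURF.create(800.0, 4, 3, false, false);
MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint();
Mat descriptors1 = new Mat(), descriptors2 = new Mat();
surf.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
surf.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);
MatOfDMatch matches = new MatOfDMatch();
BFMatcher.create().match(descriptors1, descriptors2, matches);

The homography and box drawing would still follow the same steps as drawGoodMatches; Calib3d.findHomography and Core.perspectiveTransform are available in the Java bindings as well.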
7. CMakeLists.txt
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)
set(pathPROJECT C:/Users/webnautes/AndroidStudioProjects/UseOpenCVwithCMake) # change this to your own project path
set(pathOPENCV ${pathPROJECT}/opencv)
set(pathLIBOPENCV_JAVA ${pathOPENCV}/native/libs/${ANDROID_ABI}/libopencv_java4.so)
set(CMAKE_VERBOSE_MAKEFILE on)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")
include_directories(${pathOPENCV}/native/jni/include)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
native-lib
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
${pathPROJECT}/app/src/main/cpp/native-lib.cpp )
add_library( lib_opencv SHARED IMPORTED )
set_target_properties(lib_opencv PROPERTIES IMPORTED_LOCATION ${pathLIBOPENCV_JAVA})
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
native-lib
lib_opencv
# Links the target library to the log library
# included in the NDK.
${log-lib} )