错误:当Mat对象从Java传递到jni函数时出现“致命信号11(SIGSEGV),代码1”
问题描述:
我正在运行一个使用 OpenCV 函数的摄像头程序。我把 Mat 对象传递给 JNI 函数,它工作了一段时间,然后出现错误:当 Mat 对象从 Java 传递到 JNI 函数时出现"致命信号11(SIGSEGV),代码1"
10-10 13:03:17.978: A/libc(28693): Fatal signal 11 (SIGSEGV), code 1, fault addr 0x9 in tid 28791 (Thread-5418)
运行摄像头,并调用JNI函数
Java代码:
package com.adhamenaya;
import java.util.ArrayList;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
//import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
public class MainActivity extends Activity implements CvCameraViewListener2,
OnTouchListener {
private static final String TAG = "OCVSample::Activity";
private Mat mRgba;
private Mat mGray;
private CameraBridgeViewBase mOpenCvCameraView;
private ArrayList<Mat> mats = new ArrayList<Mat>();
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(MainActivity.this);
}
break;
default: {
super.onManagerConnected(status);
}
break;
}
}
};
public MainActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
Native.loadlibs();
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.cam_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
Native.setup(mFaceCascadeFile, mNoseCascadeFile, mLandmarks);
}
@Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,
mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_BGRA2GRAY);
Native.runJni(mFaceCascadeFile, mNoseCascadeFile, mLandmarks,
mRgba.getNativeObjAddr());
return mRgba;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
// TODO Auto-generated method stub
return false;
}
}
JNI函数:
/**
 * Per-frame JNI entry point: receives the native address of the Java-side
 * cv::Mat, rotates the frame into detector orientation, runs face/nose
 * detection plus landmark fitting, and returns a short status string to
 * Java as a byte array.
 *
 * NOTE(review): faceCascadeFnameStr, noseCascadeFnameStr, landmarksStr,
 * gsys, faces, DETECT, LOG and trigger_hr are defined outside this
 * snippet — presumably file-scope state and helpers; verify.
 */
JNIEXPORT jbyteArray JNICALL Java_com_adhamenaya_Native_runJni(JNIEnv * env,
        jobject obj, jstring faceCascadeFile, jstring noseCascadeFile,
        jstring landmarks, jlong frame) {
    // The jlong is the address from Mat.getNativeObjAddr(); the Java Mat
    // still owns the underlying buffer — never delete it here.
    cv::Mat& inFrame = *(cv::Mat*) frame;

    // Load the detector cascades once. The original reloaded both cascades
    // on every frame, which is pure per-frame overhead.
    static bool sCascadesLoaded = false;
    if (!sCascadesLoaded) {
        if (!gsys.loadFaceCascade(faceCascadeFnameStr)) {
            LOG("Could not load face cascade");
            gsys.loadFaceCascade(faceCascadeFnameStr);
        } else {
            LOG("Face cascade: OK");
        }
        if (!gsys.loadNoseCascade(noseCascadeFnameStr)) {
            LOG("Could not load nose cascade");
            gsys.loadNoseCascade(noseCascadeFnameStr);
        } else {
            LOG("Nose cascade: OK");
        }
        sCascadesLoaded = true;
    }

    gsys.setFrameRate(30);
    gsys.setProgramState(DETECT);

    // Accumulates processing time across calls so the every-30-frames FPS
    // average below is meaningful; the original used a non-static local
    // that was reset to 0 on every call.
    clock_t tin;
    static clock_t tout = 0;

    // Rotate the portrait frame into the orientation the detector expects.
    cv::flip(inFrame, inFrame, 0);
    cv::transpose(inFrame, inFrame);

    // Deserialize the (large) dlib landmark model exactly once. Doing this
    // on every frame allocated the whole model repeatedly and was the main
    // source of the memory exhaustion / SIGSEGV described in the question.
    // Assumes landmarksStr names the same file on every call — verify.
    static dlib::shape_predictor pose_model;
    static bool sPoseModelLoaded = false;
    if (!sPoseModelLoaded) {
        dlib::deserialize(landmarksStr) >> pose_model;
        sPoseModelLoaded = true;
    }

    gsys.setCurrentFrame(inFrame);
    tin = clock();
    trigger_hr(gsys, faces, pose_model);

    // Process the frame. The outputs are zero-initialized: when no face is
    // detected the original logged uninitialized values (undefined
    // behavior, and %zu on an uninitialized size_t can print garbage).
    size_t spm = 0;
    float motionStrengthX = 0.0f, motionStrengthY = 0.0f;
    float phiYaw = -0xFFFFFFFF, thetaPitch = -0xFFFFFFFF;
    if (faces.size()) {
        faces[0].getSpm(gsys, spm, motionStrengthX, motionStrengthY);
        faces[0].getFacePose(phiYaw, thetaPitch);
    }
    tout = tout + clock() - tin;

    // Report the average FPS once every 30 frames, then restart the window.
    if ((gsys.getFrameCount() % 30) == 29) {
        double secs_between_frames = (double) (tout) / (CLOCKS_PER_SEC * 30.0f);
        printf("FPS = %2.2f\n", 1.0f / secs_between_frames);
        LOG("FPS = %2.2f ", 1.0f / secs_between_frames);
        tout = 0;
    }

    LOG("SPM = %zu, P = %2.2f, T = %2.2f, MS-X = %2.2f, MS-Y = %2.2f", spm,
            phiYaw, thetaPitch, motionStrengthX, motionStrengthY);

    // Hand a short status string back to Java as a byte array.
    std::string str = "SPM=";
    jbyteArray arr = env->NewByteArray(str.length());
    env->SetByteArrayRegion(arr, 0, str.length(), (jbyte*) str.c_str());
    return arr;
}
请帮助我。
答
经过两天的在线搜索,我发现问题是由"内存泄漏"引起的:我在读取视频帧并把它们交给各个函数处理时,处理结束后没有释放这些帧,因此帧数据会一直留在内存中不断累积。
我所做的是将C++
代码中的Mat对象移动到函数范围之外,并将其作为类作用域对象,因此每次调用函数时都不会创建新对象。
而且,我叫:
inFrame.release();
完成它的工作后,释放内存。
您是否确定了碰撞发生的地方?日志的相关部分很有价值。一旦确定了该行,就可以在该行上打印输入到该语句的数据。 – 18446744073709551615
@ 18446744073709551615感谢您的评论,我可以自己找出问题所在。 –