Java通过多线程共享对象-需要设计模式
我想就我正在设计的一个简单多线程系统征求一些建议。想法是:应用程序捕获视频帧并在第一个 ImageView 中显示;这些捕获的帧同时由 MyHandDetectionThread 处理,处理结果显示在第二个 ImageView 中。我的解决方案如下:
public class VideoManager {

    // Bounded, newest-first frame buffers shared between the capture, detection and display threads.
    private volatile BufferLinkedList<InputFrame> mInputFrames;
    private volatile BufferLinkedList<ProcessedFrame> mProcessedFrames;

    // Eagerly initialised singleton instance.
    private static VideoManager mVideoManagerInstance = new VideoManager();

    private Timer captureTimer;
    private MyVideoCaptureThread myVideoCaptureThread;
    private MyFrameDisplayThread myFrameDisplayThread;
    private MyHandDetectionThread myHandDetectionThread;
    private MyProcessedFrameDisplayThread myProcessedFrameDisplayThread;

    // Pipeline state flag; it was read by isActive()/stop() but never declared in the original.
    private volatile boolean isActive;

    // Messages exchanged between the pipeline threads through their LinkedBlockingQueues.
    private enum ThreadMessages {
        PROCESS_INPUT_FRAME,
        NEW_INPUT_FRAME,
        NEW_PROCESSED_FRAME_ARRIVED,
        GET_NEW_FRAME
    }

    /** Returns the process-wide VideoManager instance. */
    public static VideoManager getInstance() {
        if (mVideoManagerInstance == null) {
            mVideoManagerInstance = new VideoManager();
        }
        return mVideoManagerInstance;
    }

    // not visible constructor - for singleton purposes
    private VideoManager() {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
    }

    /**
     * Starts the capture/detection pipeline: a Timer asks the capture thread for a frame
     * at Config.fps, captured frames go to camIV, processed (hand-detected) frames to handIV.
     *
     * @param camIV  view that shows the raw camera frames
     * @param handIV view that shows the processed frames
     */
    public void startDetectionAndRecognition(ImageView camIV, ImageView handIV) {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        captureTimer = new Timer();
        myVideoCaptureThread = new MyVideoCaptureThread();
        // The original passed (camIV, handIV) to a one-argument constructor and nothing
        // to MyProcessedFrameDisplayThread, which needs the hand view - fixed here.
        myFrameDisplayThread = new MyFrameDisplayThread(camIV);
        myHandDetectionThread = new MyHandDetectionThread();
        myProcessedFrameDisplayThread = new MyProcessedFrameDisplayThread(handIV);
        captureTimer.schedule(new TimerTask() {
            public void run() {
                if (myVideoCaptureThread != null && myVideoCaptureThread.threadMessages != null)
                    myVideoCaptureThread.threadMessages.offer(ThreadMessages.GET_NEW_FRAME);
            }
        }, 0, 1000 / Config.fps);
        myFrameDisplayThread.start();
        myVideoCaptureThread.start();
        myHandDetectionThread.start();
        myProcessedFrameDisplayThread.start();
        isActive = true;
    }

    /** Stops the timer and all pipeline threads and empties both frame buffers. */
    public void stop() {
        captureTimer.cancel();
        myVideoCaptureThread.interrupt();
        myHandDetectionThread.interrupt();
        myFrameDisplayThread.interrupt();
        // The original interrupted an undeclared myGestureRecogitionThread here.
        myProcessedFrameDisplayThread.interrupt();
        // clear() instead of removeAll(self), which mutates the list while iterating it.
        mInputFrames.clear();
        mProcessedFrames.clear();
        isActive = false;
    }

    /** @return true while the pipeline is running. */
    public boolean isActive() {
        return isActive;
    }

    ////////////////////////
    // Thread clases
    ////////////////////////

    /** Grabs a frame from the webcam each time the timer posts GET_NEW_FRAME. */
    private class MyVideoCaptureThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128);

        @Override
        public void run() {
            WebCamVideoCapture vc = new WebCamVideoCapture();
            try {
                while (!isInterrupted()) {
                    // take() blocks until a message arrives instead of busy-spinning on poll().
                    if (threadMessages.take() == ThreadMessages.GET_NEW_FRAME) {
                        Mat mat = vc.getNextMatFrame();
                        if (mat != null && mInputFrames != null) {
                            mInputFrames.offerFirst(new InputFrame(mat));
                            if (myFrameDisplayThread != null)
                                myFrameDisplayThread.threadMessages.offer(ThreadMessages.NEW_INPUT_FRAME);
                            if (myHandDetectionThread != null)
                                myHandDetectionThread.threadMessages.offer(ThreadMessages.PROCESS_INPUT_FRAME);
                        }
                    }
                }
            } catch (InterruptedException e) {
                // interrupt() from stop() lands here while blocked in take(); fall through to clean up.
            } finally {
                vc.close();
            }
        }
    }

    /** Shows the newest captured frame whenever NEW_INPUT_FRAME arrives. */
    private class MyFrameDisplayThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128);
        final ImageView mCamImageView;

        public MyFrameDisplayThread(ImageView mImageView) {
            this.mCamImageView = mImageView;
        }

        private void updateImageViews() {
            if (mInputFrames != null && !mInputFrames.isEmpty()
                    && mInputFrames.peekFirst() != null && mInputFrames.peekFirst().getFrame() != null) {
                if (Config.IS_DEBUG) System.out.println("Updating frame image view");
                // NOTE(review): setImage is invoked off the UI thread - confirm Utils/platform marshals this.
                mCamImageView.setImage(Utils.cvMatToImage(mInputFrames.peekFirst().getFrame()));
            }
        }

        @Override
        public void run() {
            try {
                while (!isInterrupted()) {
                    if (threadMessages.take() == ThreadMessages.NEW_INPUT_FRAME) {
                        updateImageViews();
                    }
                }
            } catch (InterruptedException e) {
                // Interrupted while waiting - exit the loop.
            }
        }
    }

    /** Runs hand detection on the newest frame whenever PROCESS_INPUT_FRAME arrives. */
    private class MyHandDetectionThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128); //TODO if multiple threads, define it out of class
        final HandDetector hd = new HandDetector();

        @Override
        public void run() {
            try {
                while (!isInterrupted()) {
                    if (threadMessages.take() == ThreadMessages.PROCESS_INPUT_FRAME
                            && mInputFrames != null && !mInputFrames.isEmpty() && mInputFrames.peekFirst() != null) {
                        if (Config.IS_DEBUG) System.out.println("Detecting hand...");
                        mProcessedFrames.offerFirst(new ProcessedFrame(hd.detectHand(mInputFrames.peekFirst()), null, null, null));
                        // The original notified the raw-frame display thread (which ignores this
                        // message) and an undeclared gesture thread; notify the processed-frame
                        // display thread instead, which is the consumer of mProcessedFrames.
                        if (myProcessedFrameDisplayThread != null)
                            myProcessedFrameDisplayThread.threadMessages.offer(ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED);
                    }
                }
            } catch (InterruptedException e) {
                // Interrupted while waiting - exit the loop.
            }
        }
    }

    /** Shows the newest processed (hand mask) frame whenever NEW_PROCESSED_FRAME_ARRIVED arrives. */
    private class MyProcessedFrameDisplayThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(128);
        final ImageView mHandImageView;

        public MyProcessedFrameDisplayThread(ImageView mHandImageView) {
            // The original self-assigned the parameter, leaving the field null.
            this.mHandImageView = mHandImageView;
        }

        private void updateImageViews() {
            if (mProcessedFrames != null && !mProcessedFrames.isEmpty()
                    && mProcessedFrames.peekFirst() != null && mProcessedFrames.peekFirst().getmHandMask() != null) {
                if (Config.IS_DEBUG) System.out.println("Updating hand image view");
                mHandImageView.setImage(Utils.cvMatToImage(mProcessedFrames.peekFirst().getmHandMask()));
            }
        }

        @Override
        public void run() {
            try {
                while (!isInterrupted()) {
                    // The original polled the queue both here and inside updateImageViews(),
                    // consuming two messages per frame and dropping every other update.
                    if (threadMessages.take() == ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED) {
                        updateImageViews();
                    }
                }
            } catch (InterruptedException e) {
                // Interrupted while waiting - exit the loop.
            }
        }
    }
}
公共类视频管理器{
私有易失性缓冲LinkedList mInputFrames;
私有易失性缓冲区LinkedList MPProcessedFrames;
私有静态VideoManager mVideoManagerInstance=新的VideoManager();
私人定时器捕获定时器;
私人MyVideoCapturatRead MyVideoCapturatRead;
私有MyFrameDisplayThread MyFrameDisplayThread;
私有MyHandDetectionThread MyHandDetectionThread;
私有MyProcessedFrameDisplayThread MyProcessedFrameDisplayThread;
私有枚举线程消息{
处理输入帧,
新的输入框,
新的已处理的帧到达,
获取新的框架
}
公共静态VideoManager getInstance(){
if(mVideoManagerInstance==null){
mVideoManagerInstance=新的VideoManager();
}
返回mvr实例;
}
//不可见构造函数-用于单例目的
私人视频管理器(){
mInputFrames=新的BufferLinkedList(Config.inputFramesListLimit);
mpProcessedFrames=新的BufferLinkedList(Config.inputFramesListLimit);
}
公共空间开始检测和识别(ImageView camIV、ImageView handIV){
mInputFrames=新的BufferLinkedList(Config.inputFramesListLimit);
mpProcessedFrames=新的BufferLinkedList(Config.inputFramesListLimit);
captureTimer=新计时器();
MyVideoCaptUratRead=新建MyVideoCaptUratRead();
myFrameDisplayThread=新的myFrameDisplayThread(camIV、handIV);
myHandDetectionThread=新的myHandDetectionThread();
myProcessedFrameDisplayThread=新的myProcessedFrameDisplayThread();
captureTimer.schedule(新TimerTask(){
公开募捐{
if(myvideocapturatread!=null&&myvideocapturatread.threadMessages!=null)
myvideocapturatread.threadMessages.offer(threadMessages.GET_NEW_FRAME);
}
},0,1000/Config.fps);
myFrameDisplayThread.start();
myvideocapturead.start();
myHandDetectionThread.start();
myProcessedFrameDisplayThread.start();
}
公共停车场(){
captureTimer.cancel();
myvideocapturead.interrupt();
myHandDetectionThread.interrupt();
myFrameDisplayThread.interrupt();
myGestureRecogitionThread.interrupt();
mInputFrames.removeAll(mInputFrames);
mpprocessedframes.removeAll(mpprocessedframes);
isActive=假;
}
公共布尔isActive(){
回报是积极的;
}
////////////////////////
//螺纹包层
////////////////////////
私有类MyVideoCaptRead扩展线程{
LinkedBlockingQueue threadMessages=新LinkedBlockingQueue(128);
@凌驾
公开募捐{
WebCamVideoCapture vc=新的WebCamVideoCapture();
而(!isInterrupted()){
if(threadMessages!=null&&threadMessages.poll()==threadMessages.GET\u NEW\u FRAME){
Mat Mat=vc.getNextMatFrame();
if(mat!=null&&mInputFrames!=null){
mInputFrames.offerFirst(新输入帧(mat));
if(myFrameDisplayThread!=null&&myFrameDisplayThread.threadMessages!=null)
myFrameDisplayThread.threadMessages.offer(threadMessages.NEW\u INPUT\u FRAME);
if(myHandDetectionThread!=null&&myHandDetectionThread.threadMessages!=null)
myHandDetectionThread.threadMessages.offer(threadMessages.PROCESS\u输入\u帧);
}
}
}
vc.close();
}
}
私有类MyFrameDisplayThread扩展线程{
LinkedBlockingQueue threadMessages=新LinkedBlockingQueue(128);
ImageView mCamImageView;
长时间更新的CamImageViewMillis;
长上次更新的HandImageViewMillis;
公共MyFrameDisplayThread(ImageView-mImageView){
this.mCamImageView=mImageView;
}
私有同步的void updateImage视图(){
if(threadMessages.poll()==threadMessages.NEW_INPUT_FRAME&&mInputFrames!=null&&mInputFrames.isEmpty()&&mInputFrames.peek()!=null&&mInputFrames.peek().getFrame()!=null){
if(Config.IS_DEBUG)System.out.println(“更新帧图像视图”);
mCamImageView.setImage(Utils.cvMatToImage(mInputFrames.peekFirst().getFrame());
}
}
@凌驾
公开募捐{
而(!isInterrupted()){
updateImage视图();
}
}
}
私有类MyHandDetectionThread扩展线程{
LinkedBlockingQueue threadMessages=新建LinkedBlockingQueue(128);//TODO如果有多个线程,请在类外定义它
手动检测器hd=新的手动检测器();
@凌驾
公开募捐{
而(!isInterrupted()){
if(threadMessages.poll()==threadMessages.PROCESS\u INPUT\u FRAME&&mInputFrames!=null&&mInputFrames.size()>0&&mInputFrames.peek()!=null){
if(Config.IS_DEBUG)System.out.println(“检测手…”);
mpprocessedframes.offerFirst(新的ProcessedFrame(hd.detectHand(mInputFrames.peek()),null,null,null));
if(myGestureRecogitionThread!=null&&myGestureRecogitionThread.threadMessages!=null)
myGestureRecogitionThread.threadMessages.offer(threadMessages.NEW_PROCESSED_FRAME_到达)
/**
 * A LinkedList used as a bounded, newest-first frame buffer: offerFirst() evicts
 * the oldest (tail) entries so the list never holds more than sizeLimit elements.
 *
 * NOTE: only the overridden methods are synchronized; inherited mutators and
 * iteration are NOT thread-safe and must be guarded by callers.
 */
public class BufferLinkedList<E> extends LinkedList<E> {

    // Maximum number of retained elements. (The original also carried an unused
    // 'counter' field and a dead default of 48 - the constructor always assigns it.)
    private final int sizeLimit;

    public BufferLinkedList(int sizeLimit) {
        this.sizeLimit = sizeLimit;
    }

    /**
     * Inserts at the head, evicting tail entries first so that the size after the
     * insert is at most sizeLimit. The original used '> sizeLimit', which let the
     * buffer grow to sizeLimit + 1 elements.
     */
    @Override
    public synchronized boolean offerFirst(E e) {
        while (size() >= sizeLimit && size() > 0) {
            removeLast();
        }
        return super.offerFirst(e);
    }

    // The readers used by the pipeline threads are synchronized on the same monitor
    // as offerFirst(); the original left peek()/isEmpty()/size() unsynchronized even
    // though other threads call them concurrently.
    @Override
    public synchronized E peek() {
        return super.peek();
    }

    @Override
    public synchronized E peekFirst() {
        return super.peekFirst();
    }

    @Override
    public synchronized E peekLast() {
        return super.peekLast();
    }

    @Override
    public synchronized E pollFirst() {
        return super.pollFirst();
    }

    @Override
    public synchronized E pollLast() {
        return super.pollLast();
    }

    @Override
    public synchronized boolean isEmpty() {
        return super.isEmpty();
    }

    @Override
    public synchronized int size() {
        return super.size();
    }
}
public class VideoManager {

    // Bounded, newest-first frame buffers shared by all pipeline threads.
    private volatile BufferLinkedList<InputFrame> mInputFrames;
    private volatile BufferLinkedList<ProcessedFrame> mProcessedFrames;

    // Eagerly initialised singleton instance.
    private static VideoManager mVideoManagerInstance = new VideoManager();

    private Timer captureTimer;
    private MyVideoCaptureThread myVideoCaptureThread;
    private MyFrameDisplayThread myFrameDisplayThread;
    private MyHandDetectionThread myHandDetectionThread;
    private MyGestureRecogitionThread myGestureRecogitionThread;
    private MySkinDisplayThread mySkinDisplayThread;

    private final static int THREAD_MESSAGES_LIMIT = 10000;
    private final static int TIMER_INTERVAL = 1000 / Config.fps;
    private final static int WAITING_TIMEOUT = 2000;

    // Messages exchanged between the pipeline threads through their LinkedBlockingQueues.
    private enum ThreadMessages {
        PROCESS_INPUT_FRAME,
        NEW_INPUT_FRAME,
        NEW_PROCESSED_FRAME_ARRIVED,
        GET_NEW_FRAME
    }

    /** Returns the process-wide VideoManager instance. */
    public static VideoManager getInstance() {
        if (mVideoManagerInstance == null) {
            mVideoManagerInstance = new VideoManager();
        }
        return mVideoManagerInstance;
    }

    // not visible constructor - for singleton purposes
    private VideoManager() {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
    }

    /**
     * Starts the pipeline: a Timer posts GET_NEW_FRAME at Config.fps; capture feeds
     * hand detection and the raw-frame display; hand detection feeds gesture
     * recognition and the skin-mask display.
     *
     * @param camIV  view that shows the raw camera frames
     * @param handIV view that shows the skin/hand-mask frames
     */
    public void startDetectionAndRecognition(ImageView camIV, ImageView handIV) {
        mInputFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        mProcessedFrames = new BufferLinkedList<>(Config.inputFramesListLimit);
        captureTimer = new Timer();
        myFrameDisplayThread = new MyFrameDisplayThread(camIV);
        myVideoCaptureThread = new MyVideoCaptureThread();
        myHandDetectionThread = new MyHandDetectionThread();
        myGestureRecogitionThread = new MyGestureRecogitionThread();
        mySkinDisplayThread = new MySkinDisplayThread(handIV);
        myVideoCaptureThread.start();
        captureTimer.schedule(new TimerTask() {
            public void run() {
                if (myVideoCaptureThread != null && myVideoCaptureThread.threadMessages != null) {
                    myVideoCaptureThread.threadMessages.offer(ThreadMessages.GET_NEW_FRAME);
                }
            }
        }, 0, TIMER_INTERVAL);
        myFrameDisplayThread.start();
        mySkinDisplayThread.start();
        myHandDetectionThread.start();
        myGestureRecogitionThread.start();
    }

    /** Stops the timer and all pipeline threads and empties both frame buffers. */
    public void stop() {
        captureTimer.cancel();
        myVideoCaptureThread.interrupt();
        myHandDetectionThread.interrupt();
        mySkinDisplayThread.interrupt();
        myFrameDisplayThread.interrupt();
        myGestureRecogitionThread.interrupt();
        // clear() instead of removeAll(self), which mutates the list while iterating it.
        mInputFrames.clear();
        mProcessedFrames.clear();
    }

    /**
     * Blocks up to WAITING_TIMEOUT ms for the next message on the given queue.
     *
     * This replaces the original hand-rolled Lock/wait()/notifyAll() scheme, which
     * (a) held the monitor around the entire run() loop so wakeUp() could only run
     * while the worker happened to be parked in wait(), and (b) swallowed
     * InterruptedException - wait() clears the interrupt flag when it throws, so
     * isInterrupted() stayed false and interrupt() could never stop the loops.
     *
     * @return the next message, or null on timeout or interruption
     *         (the interrupt status is restored so the caller's loop terminates)
     */
    private static ThreadMessages awaitMessage(LinkedBlockingQueue<ThreadMessages> queue) {
        try {
            return queue.poll(WAITING_TIMEOUT, java.util.concurrent.TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return null;
        }
    }

    ////////////////////////
    // Thread clases
    ////////////////////////

    /** Grabs a frame from the webcam each time the timer posts GET_NEW_FRAME. */
    private class MyVideoCaptureThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);

        @Override
        public void run() {
            WebCamVideoCapture vc = new WebCamVideoCapture();
            try {
                while (!isInterrupted()) {
                    if (awaitMessage(threadMessages) != ThreadMessages.GET_NEW_FRAME) {
                        continue; // timeout or interruption - re-check the loop condition
                    }
                    Mat mat = vc.getNextMatFrame();
                    if (mat != null && mInputFrames != null) {
                        // The original called vc.getNextMatFrame() a SECOND time here,
                        // discarding the null-checked frame and possibly buffering null.
                        mInputFrames.offerFirst(new InputFrame(mat));
                        if (myHandDetectionThread != null) {
                            myHandDetectionThread.threadMessages.offer(ThreadMessages.PROCESS_INPUT_FRAME);
                        }
                        if (myFrameDisplayThread != null) {
                            myFrameDisplayThread.threadMessages.offer(ThreadMessages.NEW_INPUT_FRAME);
                        }
                    }
                }
            } finally {
                // Close the camera on this thread; the original closed it from an
                // interrupt() override, i.e. from the caller's thread while a capture
                // could still be in progress.
                vc.close();
            }
        }
    }

    /** Shows the newest captured frame whenever NEW_INPUT_FRAME arrives. */
    private class MyFrameDisplayThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);
        final ImageView mCamImageView;

        public MyFrameDisplayThread(ImageView mImageView) {
            this.mCamImageView = mImageView;
        }

        private void updateImageViews() {
            if (shouldUpdateCamImageView() && mInputFrames != null && !mInputFrames.isEmpty()
                    && mInputFrames.peekFirst() != null && mInputFrames.peekFirst().getFrame() != null) {
                // NOTE(review): setImage is invoked off the UI thread - confirm Utils/platform marshals this.
                mCamImageView.setImage(Utils.cvMatToImage(mInputFrames.peekFirst().getFrame()));
            }
        }

        @Override
        public void run() {
            while (!isInterrupted()) {
                if (awaitMessage(threadMessages) == ThreadMessages.NEW_INPUT_FRAME) {
                    updateImageViews();
                }
            }
        }

        // Raw camera preview can be disabled via configuration.
        private boolean shouldUpdateCamImageView() {
            return Config.CAPTURE_PREVIEW_MODE;
        }
    }

    /** Shows the newest skin/hand mask whenever NEW_PROCESSED_FRAME_ARRIVED arrives. */
    private class MySkinDisplayThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);
        final ImageView mHandImageView;

        public MySkinDisplayThread(ImageView mHandImageView) {
            this.mHandImageView = mHandImageView;
        }

        private void updateHandImageView() {
            if (shouldUpdateHandImageView() && mProcessedFrames != null && !mProcessedFrames.isEmpty()
                    && mProcessedFrames.peekFirst() != null && mProcessedFrames.peekFirst().getmHandMask() != null) {
                // NOTE(review): setImage is invoked off the UI thread - confirm Utils/platform marshals this.
                mHandImageView.setImage(Utils.cvMatToImage(mProcessedFrames.peekFirst().getmHandMask()));
            }
        }

        @Override
        public void run() {
            while (!isInterrupted()) {
                if (awaitMessage(threadMessages) == ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED) {
                    updateHandImageView();
                }
            }
        }

        // Skin-mask preview can be disabled via configuration.
        private boolean shouldUpdateHandImageView() {
            return Config.SKIN_MASK_PREVIEW_MODE;
        }
    }

    /** Runs hand detection on the newest frame whenever PROCESS_INPUT_FRAME arrives. */
    private class MyHandDetectionThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT); //TODO if multiple threads, define it out of class
        final HandDetector hd = new HandDetector();

        @Override
        public void run() {
            while (!isInterrupted()) {
                if (awaitMessage(threadMessages) != ThreadMessages.PROCESS_INPUT_FRAME) {
                    // The original fell through and re-processed a (possibly stale)
                    // frame on every wait timeout; only process on an actual request.
                    continue;
                }
                if (mInputFrames != null && !mInputFrames.isEmpty() && mInputFrames.peekFirst() != null) {
                    mProcessedFrames.offerFirst(new ProcessedFrame(hd.detectHand(mInputFrames.peekFirst()), null, null, null));
                    // The original called mySkinDisplayThread.wakeUp() in BOTH branches,
                    // so the gesture-recognition thread was never woken.
                    if (myGestureRecogitionThread != null) {
                        myGestureRecogitionThread.threadMessages.offer(ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED);
                    }
                    if (mySkinDisplayThread != null) {
                        mySkinDisplayThread.threadMessages.offer(ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED);
                    }
                }
            }
        }
    }

    /** Runs gesture recognition over the processed-frame buffer on each new frame. */
    private class MyGestureRecogitionThread extends Thread {
        final LinkedBlockingQueue<ThreadMessages> threadMessages = new LinkedBlockingQueue<>(THREAD_MESSAGES_LIMIT);
        final GestureRecognizer r = new GestureRecognizer();

        @Override
        public void run() {
            while (!isInterrupted()) {
                if (awaitMessage(threadMessages) == ThreadMessages.NEW_PROCESSED_FRAME_ARRIVED) {
                    r.lookForGestures(mProcessedFrames);
                }
            }
        }
    }
}