Estimating the Positions of Multiple Markers with WPF Prism and NyARToolKitCS

In the previous post I wrote a program that estimates the position of a single marker; this time the program estimates the positions of multiple markers.
http://kowaimononantenai.blogspot.jp/2017/02/wpf-prismnyartoolkit.html

The file layout is the same as in the article above.
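For reference, the layout implied by the code in this post is roughly the following (the file names are my guesses from the class names; camera_para.dat, patt.hiro, and patt.kanji are the standard ARToolKit sample data files and are loaded from the working directory):

    NyARCaptureTest/
        MainWindow.xaml            ... View
        MainWindow.xaml.cs
        MainWindowViewModel.cs     ... ViewModel
        NyARPositionEstimation.cs  ... Model
        camera_para.dat, patt.hiro, patt.kanji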

First, the View XAML. It is exactly the same as in the article above.
<Window x:Class="NyARCaptureTest.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        Title="MainWindow" Height="350" Width="525">
    <!-- Body reconstructed from the ViewModel's bindings (Bmp, StartCommand, StopCommand);
         see the linked article for the exact markup. -->
    <StackPanel>
        <Image Source="{Binding Bmp}" Width="320" Height="240"/>
        <Button Content="Start" Command="{Binding StartCommand}"/>
        <Button Content="Stop" Command="{Binding StopCommand}"/>
    </StackPanel>
</Window>
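
How the View gets its ViewModel is covered in the linked article; if you are rebuilding the project from this post alone, one simple option (an assumption on my part, not necessarily what the original does) is to assign the DataContext in the code-behind:

using System.Windows;

namespace NyARCaptureTest
{
    public partial class MainWindow : Window
    {
        public MainWindow()
        {
            InitializeComponent();
            // Hypothetical wiring; Prism's ViewModelLocator auto-wiring would also work.
            DataContext = new MainWindowViewModel();
        }
    }
}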



Next is the ViewModel.
To display the camera image, it runs an asynchronous loop with async/await.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

using Microsoft.Practices.Prism.Mvvm;
using Microsoft.Practices.Prism.Commands;

using System.Threading;
using System.Windows.Media.Imaging;
using System.Numerics; // .NET Framework 4.6 or later

namespace NyARCaptureTest
{
    class MainWindowViewModel : BindableBase
    {
        NyARPositionEstimation NyAr;
        bool isTask;

        /// <summary>
        /// Notifying Property Change
        /// </summary>

        // Latest camera frame shown by the View.
        private WriteableBitmap bmp;
        public WriteableBitmap Bmp
        {
            get { return bmp; }
            set { SetProperty(ref bmp, value); }
        }

        /// <summary>
        /// DelegateCommand
        /// </summary>

        private DelegateCommand startCommand;
        public DelegateCommand StartCommand
        {
            get { return startCommand ?? (startCommand = new DelegateCommand(Start)); }
        }
        private async void Start()
        {
            NyAr.Start();

            isTask = true;
            await ShowImage();
        }

        private DelegateCommand stopCommand;
        public DelegateCommand StopCommand
        {
            get { return stopCommand ?? (stopCommand = new DelegateCommand(Stop)); }
        }
        private void Stop()
        {
            NyAr.Stop();
            isTask = false;
        }


        public MainWindowViewModel()
        {
            NyAr = new NyARPositionEstimation();
        }

        /// <summary>
        /// Task
        /// </summary>
        /// <returns></returns>
        private async Task ShowImage()
        {
            // Poll the Model roughly every 30 ms: update the displayed frame and,
            // when marker 0 has been estimated, print its transformation matrix.
            while (isTask)
            {
                if (NyAr.IsCaptureOk)
                {
                    Bmp = NyAr.GetImage();

                    Matrix4x4[] matrix;
                    bool[] isEstimateOk;
                    NyAr.GetMatrix(out matrix, out isEstimateOk);

                    if (isEstimateOk[0])
                    {
                        Console.WriteLine(matrix[0]);
                    }
                }
                await Task.Delay(30);
            }
        }
    }
}


Finally, the Model. It captures frames from the camera, detects the two markers (patt.hiro and patt.kanji) with NyARToolkitCS, and keeps a transformation matrix for each.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

using System.IO;
using System.Drawing;
using System.Drawing.Imaging;
using System.Windows.Media.Imaging;
using System.Threading;
using System.Numerics; // .NET Framework 4.6 or later

using jp.nyatla.nyartoolkit.cs;
using jp.nyatla.nyartoolkit.cs.core;
using jp.nyatla.nyartoolkit.cs.detector;
using NyARToolkitCSUtils.Capture;
using NyARToolkitCSUtils.Direct3d;
using OpenCvSharp;
using OpenCvSharp.CPlusPlus;
using OpenCvSharp.Extensions;

namespace NyARCaptureTest
{
    class NyARPositionEstimation : CaptureListener
    {
        private NyARToolkitCSUtils.Capture.CaptureDevice cap;
        private String[] arCodeFile = { "patt.hiro", "patt.kanji" };
        private const String arCameraFile = "camera_para.dat";
        private NyARDetectMarker ar;
        private DsRgbRaster raster;
        private Mat img;
        private Object lockObj = new object();
        private Matrix4x4[] Matrix;

        public bool IsCaptureOk { get; private set; }


        public NyARPositionEstimation()
        {
            IsCaptureOk = false;
            Matrix = new Matrix4x4[2];
            Matrix[0] = Matrix4x4.Identity;
            Matrix[1] = Matrix4x4.Identity;

            // AR setup
            // Load the camera parameter file for AR
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(arCameraFile), 320, 240);

            // Load the AR pattern codes (16x16 resolution)
            NyARCode[] code = new NyARCode[2];
            code[0] = NyARCode.loadFromARPattFile(File.OpenRead(arCodeFile[0]), 16, 16);
            code[1] = NyARCode.loadFromARPattFile(File.OpenRead(arCodeFile[1]), 16, 16);

            // Create the capture device
            /**************************************************
            This code is set up to use device 0 (the first capture device found).
            On a system with several capture devices it may not pick the one you want.
            To use the n-th device, change the 0 in "CaptureDevice cap = cl[0];".
            For letting the user choose a device manually, see the SimpleLiteDirect3D sample.
            **************************************************/
            CaptureDeviceList cl = new CaptureDeviceList();
            NyARToolkitCSUtils.Capture.CaptureDevice cap = cl[0];
            cap.SetCaptureListener(this);
            cap.PrepareCapture(320, 240, 30);
            this.cap = cap;

            // Create the raster.
            this.raster = new DsRgbRaster(cap.video_width, cap.video_height);

            // Detector for the two marker codes, width 80.0 (millimetres by ARToolKit convention)
            this.ar = new NyARDetectMarker(ap, code, new double[] { 80.0, 80.0 }, 2);
            // Computation mode setting
            this.ar.setContinueMode(false);
        }


        public void Start()
        {
            this.cap.StartCapture();
        }

        public void Stop()
        {
            this.cap.StopCapture();
        }


        public WriteableBitmap GetImage()
        {
            WriteableBitmap bmp;
            lock (lockObj)
            {
                bmp = img.ToWriteableBitmap();
            }
            return bmp;
        }

        public void GetMatrix(out Matrix4x4[] matrix, out bool[] isEstimateOk)
        {
            lock (lockObj)
            {
                matrix = new Matrix4x4[2];
                matrix[0] = GetDeepCopyMatrix4x4(Matrix[0]);
                matrix[1] = GetDeepCopyMatrix4x4(Matrix[1]);

                // The identity matrix is used as the "not estimated" sentinel,
                // so a non-identity matrix means the marker was found.
                isEstimateOk = new bool[2];
                isEstimateOk[0] = !matrix[0].Equals(Matrix4x4.Identity);
                isEstimateOk[1] = !matrix[1].Equals(Matrix4x4.Identity);
            }
        }


        public void OnBuffer(NyARToolkitCSUtils.Capture.CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
        {
            int w = i_sender.video_width;
            int h = i_sender.video_height;
            int s = w * (i_sender.video_bit_count / 8);

            // Wrap the capture buffer in a Bitmap for display and feed the same buffer to the AR raster.
            Bitmap b = new Bitmap(w, h, s, PixelFormat.Format32bppRgb, i_buffer);
            b.RotateFlip(RotateFlipType.RotateNoneFlipY);
            this.raster.setBuffer(i_buffer, i_buffer_len, i_sender.video_vertical_flip);

            lock (lockObj)
            {
                img = b.ToMat();
                int detectedMarkerNum = this.ar.detectMarkerLite(this.raster, 100);
                int markerId = 0;

                for (int marker = 0; marker < detectedMarkerNum; marker++)
                {
                    // getARCodeIndex() maps the detection index to the pattern code index.
                    markerId = ar.getARCodeIndex(marker);

                    if (markerId == 0)
                    {
                        // getTransmationMatrix()/getSquare() take the detection index, not the code index.
                        NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
                        this.ar.getTransmationMatrix(marker, result_mat);
                        Matrix[0] = ConvertNyARDoubleMatrix44ToMatrix4x4(result_mat);
                        NyARSquare square = ar.getSquare(marker);

                        // Paint the detected square red so the marker is visible in the preview.
                        var point = new OpenCvSharp.CPlusPlus.Point[4];
                        for (int i = 0; i < 4; i++)
                        {
                            point[i] = new OpenCvSharp.CPlusPlus.Point(square.sqvertex[i].x, square.sqvertex[i].y);
                        }
                        img.FillConvexPoly(point, new Scalar(0, 0, 255));
                    }

                    if (markerId == 1)
                    {
                        NyARDoubleMatrix44 result_mat = new NyARDoubleMatrix44();
                        this.ar.getTransmationMatrix(marker, result_mat);
                        Matrix[1] = ConvertNyARDoubleMatrix44ToMatrix4x4(result_mat);
                        NyARSquare square = ar.getSquare(marker);

                        // Paint the second marker's square blue.
                        var point = new OpenCvSharp.CPlusPlus.Point[4];
                        for (int i = 0; i < 4; i++)
                        {
                            point[i] = new OpenCvSharp.CPlusPlus.Point(square.sqvertex[i].x, square.sqvertex[i].y);
                        }
                        img.FillConvexPoly(point, new Scalar(255, 0, 0));
                    }
                }

                // Reset the matrix of any marker that was not seen in this frame.
                if (detectedMarkerNum == 1)
                {
                    if (markerId == 0)
                    {
                        Matrix[1] = Matrix4x4.Identity;
                    }
                    else if (markerId == 1)
                    {
                        Matrix[0] = Matrix4x4.Identity;
                    }
                }
                else if (detectedMarkerNum == 0)
                {
                    Matrix[0] = Matrix4x4.Identity;
                    Matrix[1] = Matrix4x4.Identity;
                }
                IsCaptureOk = true;
            }
        }

        // Copy a NyARDoubleMatrix44 (row-major, m00..m33) element for element
        // into a System.Numerics.Matrix4x4.
        private Matrix4x4 ConvertNyARDoubleMatrix44ToMatrix4x4(NyARDoubleMatrix44 m)
        {
            return new Matrix4x4((float)m.m00, (float)m.m01, (float)m.m02, (float)m.m03,
                                 (float)m.m10, (float)m.m11, (float)m.m12, (float)m.m13,
                                 (float)m.m20, (float)m.m21, (float)m.m22, (float)m.m23,
                                 (float)m.m30, (float)m.m31, (float)m.m32, (float)m.m33);
        }

        // Return a copy of the matrix so callers never see a value that is being
        // updated by OnBuffer (Matrix4x4 is a struct, so this is a field-by-field copy).
        private Matrix4x4 GetDeepCopyMatrix4x4(Matrix4x4 m)
        {
            return new Matrix4x4(m.M11, m.M12, m.M13, m.M14,
                                 m.M21, m.M22, m.M23, m.M24,
                                 m.M31, m.M32, m.M33, m.M34,
                                 m.M41, m.M42, m.M43, m.M44);
        }
    }
}
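
The matrices returned by GetMatrix are the marker-to-camera transforms, so their translation components give each marker's position in camera coordinates. As a small example of using the two estimates together (my own sketch, not part of the original project; the class and method names are placeholders, and it relies on ConvertNyARDoubleMatrix44ToMatrix4x4 copying NyAR's m03/m13/m23 into M14/M24/M34), the distance between the two markers can be computed like this:

using System;
using System.Numerics;

namespace NyARCaptureTest
{
    // Example helper (not part of the original project).
    static class MarkerDistanceExample
    {
        // Translation part of a matrix produced by ConvertNyARDoubleMatrix44ToMatrix4x4:
        // NyAR's m03/m13/m23 were copied into M14/M24/M34.
        static Vector3 MarkerPosition(Matrix4x4 m)
        {
            return new Vector3(m.M14, m.M24, m.M34);
        }

        // Prints the distance between the two marker origins. The unit matches the
        // marker width passed to NyARDetectMarker (80.0, i.e. millimetres here).
        public static void PrintDistance(NyARPositionEstimation nyAr)
        {
            Matrix4x4[] matrix;
            bool[] isEstimateOk;
            nyAr.GetMatrix(out matrix, out isEstimateOk);

            if (isEstimateOk[0] && isEstimateOk[1])
            {
                float d = Vector3.Distance(MarkerPosition(matrix[0]), MarkerPosition(matrix[1]));
                Console.WriteLine("Distance between markers: {0} mm", d);
            }
        }
    }
}

Calling PrintDistance from the ShowImage loop (in place of the single Console.WriteLine) prints the separation whenever both markers are in view.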