As a tracer bullet for a program that controls the computer with hand gestures via hand tracking, I put together the prototype below.
The RGB frame is converted to the HSV color model to extract the skin region, and only the largest connected skin area is kept. The contour of that area is then computed and its longest protrusion is treated as a finger. In testing, this judgment was error-prone and accurate finger detection was not possible, so the next step is to try an approach based on the hand's center of mass.
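As a starting point for that follow-up, here is a minimal sketch of a center-of-mass helper built on the same CvBlobs machinery used in FilterByMaximalBlob below; the method name GetHandCenter is hypothetical and the input is assumed to be the binary skin mask.

// Sketch: center of mass of the largest skin blob in a binary mask.
// GetHandCenter is a hypothetical helper name; imgBinary is assumed to be
// the single-channel mask produced by the skin segmentation step.
static CvPoint GetHandCenter(IplImage imgBinary)
{
    using (CvBlobs blobs = new CvBlobs())
    using (IplImage imgLabel = new IplImage(imgBinary.Size, CvBlobLib.DepthLabel, 1))
    {
        blobs.Label(imgBinary, imgLabel);
        CvBlob max = blobs[blobs.GreaterBlob()];
        if (max == null)
            return new CvPoint(0, 0);   // empty mask: no hand found
        // Centroid is the area-weighted center of mass of the blob
        return new CvPoint((int)max.Centroid.X, (int)max.Centroid.Y);
    }
}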
using System;
using System.Drawing;    // Point, used in getCenter
using OpenCvSharp;
using OpenCvSharp.Blob;  // CvBlobs / CvBlobLib

class Gestures
{
// Template size; MouseSystem.img_move must be assigned before a Gestures instance is created
int TPL_WIDTH = MouseSystem.img_move.Width;
int TPL_HEIGHT = MouseSystem.img_move.Height;
const double THRESHOLD = 0.3;
IplImage tpl, tm;
int object_x0, object_y0;
IplImage imgHSV;
IplImage imgH;
IplImage imgS;
IplImage imgV;
IplImage imgBackProjection;
IplImage imgFlesh;
IplImage imgHull;
IplImage imgDefect;
IplImage imgContour;
IplImage[] hsvPlanes = new IplImage[3];
IplImage imgclick;
IplImage imgmove;
CvMemStorage storage = new CvMemStorage();
CvHistogram hist;
CvPoint p1, p2;
public Gestures()
{
}
// Palm-center estimate via distance transform: returns the point of maximum
// distance from the mask boundary (only referenced from commented-out code below)
unsafe Point getCenter(IplImage img)
{
float[] mask = new float[2];  // user-defined 3x3 distance mask: {straight step, diagonal step}
IplImage dist;
IplImage dist8u;
IplImage dist32s;
int max;
Point p = new Point();
byte* ptr;
dist = Cv.CreateImage(Cv.GetSize(img), BitDepth.F32, 1);
dist8u = Cv.CloneImage(img);
dist32s = Cv.CreateImage(Cv.GetSize(img), BitDepth.S32, 1);
// Distance transform with a user-defined 3x3 mask
// (weight 1 for horizontal/vertical steps, 1.5 for diagonal steps)
mask[0] = 1f;
mask[1] = 1.5f;
Cv.DistTransform(img, dist, DistanceType.User, 3, mask, null);
// Rescale and take the square root so the values fit into 8 bits for the scan below
Cv.ConvertScale(dist, dist, 1000, 0);
Cv.Pow(dist, dist, 0.5);
Cv.ConvertScale(dist, dist32s, 1.0, 0.5);
Cv.AndS(dist32s, Cv.ScalarAll(255), dist32s, null);
Cv.ConvertScale(dist32s, dist8u, 1, 0);
// Scan the distance image for its maximum value: the pixel farthest from
// the region boundary, which approximates the palm center
max = 0;
ptr = (byte*)dist8u.ImageData;
for (int i = 0; i < dist8u.Height; i++)
{
int index = i * dist8u.WidthStep;
for (int j = 0; j < dist8u.Width; j++)
{
if (ptr[index + j] > max)
{
max = ptr[index + j];
p.X = j;
p.Y = i;
}
}
}
return p;
}
// Main per-frame processing: skin segmentation, largest-blob filtering,
// contour/convex-hull analysis, and template-based tracking
public void Convexty(IplImage imgSrc)
{
try
{
imgHSV = new IplImage(imgSrc.Size, BitDepth.U8, 3);
imgH = new IplImage(imgSrc.Size, BitDepth.U8, 1);
imgS = new IplImage(imgSrc.Size, BitDepth.U8, 1);
imgV = new IplImage(imgSrc.Size, BitDepth.U8, 1);
imgBackProjection = new IplImage(imgSrc.Size, BitDepth.U8, 1);
imgFlesh = new IplImage(imgSrc.Size, BitDepth.U8, 1);
imgHull = new IplImage(imgSrc.Size, BitDepth.U8, 1);
imgDefect = new IplImage(imgSrc.Size, BitDepth.U8, 3);
imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3);
//imagmove;
int WINDOW_WIDTH = imgSrc.Width;
int WINDOW_HEIGHT = imgSrc.Height;
// RGB -> HSV
Cv.CvtColor(imgSrc, imgHSV, ColorConversion.BgrToHsv);
Cv.Split(imgHSV, imgH, imgS, imgV, null);
hsvPlanes[0] = imgH;
hsvPlanes[1] = imgS;
hsvPlanes[2] = imgV;
// Extract the skin-colored region (H-S histogram back projection)
RetrieveFleshRegion(imgSrc, hsvPlanes, imgBackProjection);
// Keep only the largest connected region
FilterByMaximalBlob(imgBackProjection, imgFlesh);
Interpolate(imgFlesh);
// (template matching is done further below)
// Find the contour of the hand region
CvSeq<CvPoint> contours = FindContours(imgFlesh, storage);
if (contours != null)
{
Cv.DrawContours(imgContour, contours, CvColor.Red, CvColor.Green, 0, 3, LineType.AntiAlias);
// Compute the convex hull
CvSeq hull = Cv.ConvexHull2(contours, storage, ConvexHullOrientation.Clockwise, false);
Cv.Copy(imgFlesh, imgHull);
// Draw the convex hull outline
DrawConvexHull(hull, imgHull);
//DrawConvexHull(hull, imgBackProjection);
// Compute the convexity defects (concave gaps between the fingers)
Cv.Copy(imgContour, imgDefect);
CvSeq<CvConvexityDefect> defect = Cv.ConvexityDefects(contours, hull, storage);
DrawDefects(imgDefect, defect);
}
else
{
return;
}
imgmove = new IplImage(new CvSize(imgBackProjection.Width - MouseSystem.img_move.Width + 1,
imgBackProjection.Height - MouseSystem.img_move.Height + 1), BitDepth.F32, 1);
//MouseSystem.imgResult[0] = imgSrc;
tpl = MouseSystem.img_move;
tm = new IplImage(new CvSize(WINDOW_WIDTH - TPL_WIDTH + 1, WINDOW_HEIGHT - TPL_HEIGHT + 1), BitDepth.F32, 1);
trackObject(imgBackProjection);
//Point result = getCenter(imgBackProjection);
//imgDefect.DrawCircle(new CvPoint(result.X,result.Y), 70, new CvScalar(255,255,255) -1);
MouseSystem.imgResult[0] = imgSrc;
MouseSystem.imgResult[1] = imgBackProjection;
MouseSystem.imgResult[2] = imgDefect;
Cv.WaitKey(1);  // brief event pump; the parameterless WaitKey() would block until a key is pressed
}
catch
{
// exceptions are silently ignored in this prototype
}
}
private void DrawDefects(IplImage img, CvSeq<CvConvexityDefect> defect)
{
int count = 0;
try
{
foreach (CvConvexityDefect item in defect)
{
p1 = item.Start;
p2 = item.End;
double dist = GetDistance(p1, p2);
CvPoint2D64f mid = GetMidpoint(p1, p2);
img.DrawLine(p1, p2, CvColor.White, 3);
if (count == 1)
{
img.DrawCircle(item.Start, 10, CvColor.Pink, -1);
MouseSystem.MousePointStart = item.Start;
}
else
{
img.DrawCircle(item.Start, 10, CvColor.Green, -1);
}
img.DrawLine(mid, item.DepthPoint, CvColor.White, 1);
count++;
}
MouseSystem.str_ControlsCount = (count - 1).ToString();
// The defect count minus one roughly corresponds to the number of extended fingers
switch (count - 1)
{
case 1:
MouseSystem.str_MouseState = "One";
//NativeMethod.mouse_event(NativeMethod.WM_LBUTTONDOWN, 0, 0, 0, 0);
break;
case 2:
MouseSystem.str_MouseState = "Two";
//NativeMethod.mouse_event(NativeMethod.WM_LBUTTONDOWN, 0, 0, 0, 0);
break;
case 3:
MouseSystem.str_MouseState = "Three";
//NativeMethod.mouse_event(NativeMethod.WM_LBUTTONUP, 0, 0, 0, 0);
break;
case 4:
MouseSystem.str_MouseState = "Four";
//NativeMethod.mouse_event(NativeMethod.WM_LBUTTONUP, 0, 0, 0, 0);
break;
case 5:
MouseSystem.str_MouseState = "Five";
//NativeMethod.mouse_event(NativeMethod.WM_LBUTTONUP, 0, 0, 0, 0);
break;
}
}
catch
{
}
}
private CvPoint2D64f GetMidpoint(CvPoint p1, CvPoint p2)
{
return new CvPoint2D64f
{
X = (p1.X + p2.X) / 2.0,
Y = (p1.Y + p2.Y) / 2.0
};
}
private double GetDistance(CvPoint p1, CvPoint p2)
{
return Math.Sqrt(Math.Pow(p1.X - p2.X, 2) + Math.Pow(p1.Y - p2.Y, 2));
}
void DrawConvexHull(CvSeq hull, IplImage img)
{
CvPoint pt0 = Cv.GetSeqElem<Pointer<CvPoint>>(hull, hull.Total - 1).Value.Entity;
for (int i = 0; i < hull.Total; i++)
{
CvPoint pt = Cv.GetSeqElem<Pointer<CvPoint>>(hull, i).Value.Entity;
Cv.Line(img, pt0, pt, new CvColor(255, 255, 255));
pt0 = pt;
}
}
public CvSeq<CvPoint> FindContours(IplImage img, CvMemStorage storage)
{
// Extract contours from the binary mask
CvSeq<CvPoint> contours;
IplImage imgClone = img.Clone();
Cv.FindContours(imgClone, storage, out contours);
if (contours == null)
{
return null;
}
contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 3, true);
// Keep only the longest contour (assumed to be the hand outline)
CvSeq<CvPoint> max = contours;
for (CvSeq<CvPoint> c = contours; c != null; c = c.HNext)
{
if (max.Total < c.Total)
{
max = c;
}
}
return max;
}
public void Interpolate(IplImage img)
{
// Morphological closing (dilate then erode) to fill small holes in the mask
Cv.Dilate(img, img, null, 2);
Cv.Erode(img, img, null, 2);
}
private void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
{
using (CvBlobs blobs = new CvBlobs())
using (IplImage imgLabelData = new IplImage(imgSrc.Size, CvBlobLib.DepthLabel, 1))
{
imgDst.Zero();
blobs.Label(imgSrc, imgLabelData);
CvBlob max = blobs[blobs.GreaterBlob()];
if (max == null)
{
return;
}
blobs.FilterByArea(max.Area, max.Area);
blobs.FilterLabels(imgLabelData, imgDst);
// Use the top of the largest blob (centroid x, minimum y) as the pointer position
MouseSystem.MousePointEnd = new CvPoint((int)max.Centroid.X, (int)max.MinY);
}
}
private void RetrieveFleshRegion(IplImage imgSrc, IplImage[] hsvPlanes, IplImage imgDst)
{
// Skin detection by H-S histogram back projection:
// hue 0-20 and saturation 50-255 are treated as flesh tones
int[] histSize = new int[] { 30, 32 };
float[] hRanges = { 0.0f, 20f };
float[] sRanges = { 50f, 255f };
float[][] ranges = { hRanges, sRanges };
imgDst.Zero();
hist = new CvHistogram(histSize, HistogramFormat.Array, ranges, true);
hist.Calc(hsvPlanes, false, null);
float minValue, maxValue;
hist.GetMinMaxValue(out minValue, out maxValue);
hist.Normalize(imgSrc.Width * imgSrc.Height * 255 / maxValue);
Cv.CalcBackProject(hsvPlanes, imgDst, hist);
}
private void trackObject(IplImage img_src)
{
int WINDOW_WIDTH = img_src.Width;
int WINDOW_HEIGHT = img_src.Height;
try
{
CvPoint minloc, maxloc;
double minval, maxval;
// Clamp the search window so the ROI stays inside the image; otherwise
// SetImageROI throws and the frame is silently skipped by the catch below.
// With the window equal to the full frame this searches the whole image.
int win_x0 = Math.Max(0, Math.Min(object_x0 - ((WINDOW_WIDTH - TPL_WIDTH) / 2), img_src.Width - WINDOW_WIDTH));
int win_y0 = Math.Max(0, Math.Min(object_y0 - ((WINDOW_HEIGHT - TPL_HEIGHT) / 2), img_src.Height - WINDOW_HEIGHT));
Cv.SetImageROI(img_src, new CvRect(win_x0, win_y0, WINDOW_WIDTH, WINDOW_HEIGHT));
Cv.MatchTemplate(img_src, tpl, tm, MatchTemplateMethod.SqDiffNormed);
Cv.MinMaxLoc(tm, out minval, out maxval, out minloc, out maxloc);
Cv.ResetImageROI(img_src);
if (minval <= THRESHOLD)
{
object_x0 = win_x0 + minloc.X;
object_y0 = win_y0 + minloc.Y;
Cv.DrawRect(img_src, new CvRect(object_x0, object_y0, TPL_WIDTH, TPL_HEIGHT), CvColor.Red, 2);
}
}
catch
{
}
}
}
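For completeness, here is a minimal sketch of how this class might be driven from a webcam loop; the capture index 0 is an assumption, and MouseSystem.img_move (the template) and the imgResult display are assumed to be set up elsewhere in the project.

// Hypothetical driver loop, assuming MouseSystem.img_move already holds the
// template image and MouseSystem.imgResult is rendered by the UI elsewhere.
using (CvCapture capture = CvCapture.FromCamera(0))
{
    Gestures gestures = new Gestures();
    while (Cv.WaitKey(10) != 27)                  // run until ESC is pressed
    {
        IplImage frame = capture.QueryFrame();    // grab the next webcam frame
        if (frame == null)
            break;
        gestures.Convexty(frame);                 // run the gesture pipeline on the frame
    }
}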