You should be redirected shortly, alternatively please click the following link
PassFace: Face Recognition Using OpenCV
// Recognition dispatch: runs the algorithm currently selected in the combo box
// against the live frame. NOTE(review): "image", "face", "font", the recognizers
// and the label lists are class members defined outside this fragment.
if (comboBoxAlgorithm.Text == "SURF Feature Extractor")
{
    // Match the live frame against every stored training image using SURF keypoints.
    string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
    string[] files = Directory.GetFiles(dataDirectory, "*.jpeg", SearchOption.AllDirectories);
    foreach (var file in files)
    {
        richTextBox1.Text += file;
        long recpoints; // matched keypoint count reported by the matcher
        Image<Bgr, Byte> sampleImage = new Image<Bgr, Byte>(file);
        secondImageBox.Image = sampleImage;
        using (Image<Gray, Byte> modelImage = sampleImage.Convert<Gray, Byte>())
        using (Image<Gray, Byte> observedImage = image.Convert<Gray, Byte>())
        {
            Image<Bgr, byte> result = SurfRecognizer.Draw(modelImage, observedImage, out recpoints);
            // Require more than 10 matched keypoints before declaring a match.
            if (recpoints > 10)
            {
                MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
                result.Draw("Person Recognized, Welcome", ref f, new Point(40, 40), new Bgr(0, 255, 0));
                ImageViewer.Show(result, String.Format(" {0} Points Recognized", recpoints));
            }
        }
    }
}
else if (comboBoxAlgorithm.Text == "EigenFaces")
{
    // Optionally normalize lighting; must mirror the preprocessing used at training time.
    if (eqHisChecked.Checked)
    {
        image._EqualizeHist();
    }
    var result = eigenFaceRecognizer.Predict(image.Convert<Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
    // Label -1 means "no match"; otherwise draw the recognized name above the face.
    if (result.Label != -1)
    {
        image.Draw(eigenlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
        label6.Text = result.Distance.ToString();
    }
}
else if (comboBoxAlgorithm.Text == "FisherFaces")
{
    if (eqHisChecked.Checked)
    {
        image._EqualizeHist();
    }
    var result = fisherFaceRecognizer.Predict(image.Convert<Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
    if (result.Label != -1)
    {
        image.Draw(fisherlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
        label6.Text = result.Distance.ToString();
    }
}
else if (comboBoxAlgorithm.Text == "LBPHFaces")
{
    if (eqHisChecked.Checked)
    {
        image._EqualizeHist();
    }
    var result = lbphFaceRecognizer.Predict(image.Convert<Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC));
    if (result.Label != -1)
    {
        image.Draw(lbphlabels[result.Label].ToString(), ref font, new Point(face.X - 2, face.Y - 2), new Bgr(Color.LightGreen));
        label6.Text = result.Distance.ToString();
    }
}
|
Add Database Function
/// <summary>
/// Captures the current frame, crops it to the detected face and saves a
/// 100x100 training image under TrainedFaces, named "&lt;person&gt;_&lt;timestamp&gt;.jpeg".
/// </summary>
private void addDatabaseButton_Click(object sender, EventArgs e)
{
    // Snapshot the clock ONCE so the file name cannot mix components from two
    // different seconds/minutes (the original read DateTime.Now six times).
    DateTime now = DateTime.Now;
    string fileName = textBox1.Text + "_" + now.Day + "-" + now.Month + "-" + now.Year
        + "-" + now.Hour + "-" + now.Minute + "-" + now.Second + ".jpeg";
    // Grab a frame and run the Haar-cascade face/eye detector on it.
    Image<Bgr, Byte> image = _capture.RetrieveBgrFrame().Resize(400, 300, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
    List<Rectangle> faces = new List<Rectangle>();
    List<Rectangle> eyes = new List<Rectangle>();
    long detectionTime;
    DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);
    // Restrict the image to the detected face region. If several faces were
    // found, only the last rectangle wins (unchanged from original behavior).
    foreach (Rectangle face in faces)
    {
        image.ROI = face;
    }
    // NOTE(review): if no face was detected the ROI stays unset and the whole
    // frame is saved — consider warning the user instead of saving silently.
    Directory.CreateDirectory("TrainedFaces");
    image.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC).ToBitmap().Save("TrainedFaces\\" + fileName);
}
/// <summary>
/// (Re)trains the recognizer matching the newly selected algorithm from the
/// .jpeg images found under TrainedFaces. One integer label per file; the
/// display name comes from fileName(file).
/// </summary>
private void comboBoxAlgorithm_SelectedIndexChanged(object sender, EventArgs e)
{
    if (comboBoxAlgorithm.Text == "EigenFaces")
    {
        try
        {
            string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
            string[] files = Directory.GetFiles(dataDirectory, "*.jpeg", SearchOption.AllDirectories);
            // Clear previous state: the counter was reset but the lists were
            // not, so re-selecting an algorithm duplicated images and
            // misaligned labels against the freshly numbered integer labels.
            eigenTrainingImages.Clear();
            eigenlabels.Clear();
            eigenIntlabels.Clear();
            eigenTrainedImageCounter = 0;
            foreach (var file in files)
            {
                Image<Bgr, Byte> trainedImage = new Image<Bgr, Byte>(file);
                // Equalize BEFORE converting/adding so training matches the
                // preprocessing applied at prediction time.
                if (eqHisChecked.Checked)
                {
                    trainedImage._EqualizeHist();
                }
                eigenTrainingImages.Add(trainedImage.Convert<Gray, Byte>());
                eigenlabels.Add(fileName(file));
                eigenIntlabels.Add(eigenTrainedImageCounter);
                eigenTrainedImageCounter++;
                richTextBox1.Text += fileName(file) + "\n";
            }
            eigenFaceRecognizer = new EigenFaceRecognizer(eigenTrainedImageCounter, 3000);
            eigenFaceRecognizer.Train(eigenTrainingImages.ToArray(), eigenIntlabels.ToArray());
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.ToString());
            MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Trained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        }
    }
    else if (comboBoxAlgorithm.Text == "FisherFaces")
    {
        try
        {
            string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
            string[] files = Directory.GetFiles(dataDirectory, "*.jpeg", SearchOption.AllDirectories);
            fisherTrainingImages.Clear();
            fisherlabels.Clear();
            fisherIntlabels.Clear();
            fisherTrainedImageCounter = 0;
            foreach (var file in files)
            {
                Image<Bgr, Byte> trainedImage = new Image<Bgr, Byte>(file);
                // BUG FIX: the original added the grayscale copy to the
                // training set BEFORE equalizing, so equalization never
                // affected fisher training. Equalize first, like the other
                // two branches.
                if (eqHisChecked.Checked)
                {
                    trainedImage._EqualizeHist();
                }
                fisherTrainingImages.Add(trainedImage.Convert<Gray, Byte>());
                fisherlabels.Add(fileName(file));
                fisherIntlabels.Add(fisherTrainedImageCounter);
                fisherTrainedImageCounter++;
                richTextBox1.Text += fileName(file) + "\n";
            }
            fisherFaceRecognizer = new FisherFaceRecognizer(fisherTrainedImageCounter, 3000);
            fisherFaceRecognizer.Train(fisherTrainingImages.ToArray(), fisherIntlabels.ToArray());
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.ToString());
            MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Trained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        }
    }
    else if (comboBoxAlgorithm.Text == "LBPHFaces")
    {
        try
        {
            string dataDirectory = Directory.GetCurrentDirectory() + "\\TrainedFaces";
            string[] files = Directory.GetFiles(dataDirectory, "*.jpeg", SearchOption.AllDirectories);
            lbphTrainingImages.Clear();
            lbphlabels.Clear();
            lbphIntlabels.Clear();
            lbphTrainedImageCounter = 0;
            foreach (var file in files)
            {
                Image<Bgr, Byte> trainedImage = new Image<Bgr, Byte>(file);
                if (eqHisChecked.Checked)
                {
                    trainedImage._EqualizeHist();
                }
                lbphTrainingImages.Add(trainedImage.Convert<Gray, Byte>());
                lbphlabels.Add(fileName(file));
                lbphIntlabels.Add(lbphTrainedImageCounter);
                lbphTrainedImageCounter++;
                richTextBox1.Text += fileName(file) + "\n";
            }
            // LBPH parameters: radius=1, neighbors=8, 8x8 grid, threshold=123.0.
            lbphFaceRecognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 123.0);
            lbphFaceRecognizer.Train(lbphTrainingImages.ToArray(), lbphIntlabels.ToArray());
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.ToString());
            MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Trained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        }
    }
}
|
Etiketler: Eigenfaces, EmguCV, Face recognition, Fisherfaces, Görüntü İşleme, opencv