Control a motor using face recognition from a camera

The C# source below (WinForms + Emgu CV, the EZ-Face / MultiFaceRec project) detects faces from a webcam with a Haar cascade, recognizes them with an eigenface recognizer, logs the recognized names, pushes them to EZ-Builder over TCP, and sends "on" or "off" over a serial port to the motor controller depending on whether a face is in view.
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.CvEnum;
using System.IO;
using System.Diagnostics;
using System.Media;
using System.Net.Sockets;
using DirectShowLib;
using System.IO.Ports;
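//NOTE: this listing targets the Emgu CV 2.x API (HaarCascade, MCvFont, EigenObjectRecognizer)
//and the DirectShowLib .NET wrapper for camera enumeration; later Emgu CV releases renamed or
//removed several of these types, so it will not build against them without changes.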
namespace MultiFaceRec
{
public partial class FrmPrincipal : Form
{
//Declaration of all variables, vectors and Haar cascades
Image<Bgr, Byte> currentFrame;
Capture grabber;
HaarCascade face;
HaarCascade eye;
MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);
Image<Gray, byte> result, TrainedFace = null;
Image<Gray, byte> gray = null;
List<Image<Gray, byte>> trainingImages = new List<Image<Gray, byte>>();
List<string> labels= new List<string>();
List<string> NamePersons = new List<string>();
int ContTrain, NumLabels, t;
string name, names = null;
bool CapturingProcess = false; //set once the capture/recognition loop has been started
TcpClient _tcpClient = null; //connection to the EZ-Builder scripting interface
bool CapRunning = false; //set once a capture device has been opened
private int _CameraIndex; //DirectShow index of the camera selected in the combo box
bool CamAuto = false; //start the camera automatically when the form loads
bool ConnectAuto = false; //connect to EZ-Builder automatically when the form loads
string revision = "3.7.14"; //shown in the window title and the About box
public FrmPrincipal()
{
InitializeComponent();
//Load haarcascades for face detection
face = new HaarCascade("haarcascade_frontalface_default.xml");
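//The frontal-face cascade is the stock XML file that ships with OpenCV/Emgu CV; the relative
//path above means it must sit in the application's working directory (normally the exe folder).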
//eye = new HaarCascade("haarcascade_eye.xml");
try
{
//Load previously trained faces and the labels for each image
string Labelsinfo = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt");
string[] Labels = Labelsinfo.Split('%');
NumLabels = Convert.ToInt16(Labels[0]);
ContTrain = NumLabels;
string LoadFaces;
for (int tf = 1; tf < NumLabels+1; tf++)
{
LoadFaces = "face" + tf + ".bmp";
trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
labels.Add(Labels[tf]);
}
}
catch(Exception e)
{
//MessageBox.Show(e.ToString());
MessageBox.Show("No faces have been trained. Please add at least a face (train with the Add face Button).", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
}
}
private void button1_Click(object sender, EventArgs e)
{
//Set the camera number to the one selected via the combo box
//This method is no longer used; DirectShow is now used to list capture devices by name
//int CamNumber = -1;
//CamNumber = int.Parse(cbCamIndex.Text);
//This is for reading faces from a video file, for testing only at this time
//String sFileName = @"c:\test.mp4"; //this works with the new opencv_ffmpeg290.dll in the bin folder
//grabber = new Capture(sFileName); //this works, but crashes the app once the movie stops
//Initialize the capture device
grabber = new Capture(_CameraIndex);
grabber.QueryFrame();
//Initialize the FrameGrabber event handler
Application.Idle += new EventHandler(FrameGrabber);
button1.Enabled = false;
CapturingProcess = true;
btn_stop_capture.Enabled = true;
groupBox1.Enabled = true;
CapRunning = true;
}
private void button2_Click(object sender, System.EventArgs e)
{
try
{
//Trained face counter
ContTrain = ContTrain + 1;
//Get a gray frame from capture device
gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
//Face Detector
MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
face,
1.2,
10,
Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
new Size(20, 20));
//Action for each element detected
foreach (MCvAvgComp f in facesDetected[0])
{
TrainedFace = currentFrame.Copy(f.rect).Convert<Gray, byte>();
break;
}
//Resize the detected face so it is the same size as the training/test images,
//using cubic interpolation
TrainedFace = TrainedFace.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
trainingImages.Add(TrainedFace);
labels.Add(textBox1.Text);
//Show face added in gray scale
imageBox1.Image = TrainedFace;
//Write the number of trained faces to a text file for later loading
File.WriteAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt", trainingImages.ToArray().Length.ToString() + "%");
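//(TrainedLabels.txt layout: the face count, then each label, every entry terminated by '%')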
//Write the labels of the trained faces to the text file for later loading
for (int i = 1; i < trainingImages.ToArray().Length + 1; i++)
{
trainingImages.ToArray()[i - 1].Save(Application.StartupPath + "/TrainedFaces/face" + i + ".bmp");
File.AppendAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt", labels.ToArray()[i - 1] + "%");
}
MessageBox.Show(textBox1.Text + "'s face detected and added.", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
}
catch
{
MessageBox.Show("Enable the face detection first.", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
}
}
void FrameGrabber(object sender, EventArgs e)
{
label3.Text = "0";
//label4.Text = "";
NamePersons.Add("");
//This is where the app most often encounters critical errors
//Get the current frame from the capture device
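//(QueryFrame can return null if the device stops delivering frames, e.g. when it is unplugged;
//the Resize call below then throws, which is a likely source of the crashes mentioned above)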
currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
//Convert it to Grayscale
gray = currentFrame.Convert<Gray, Byte>();
//Face Detector
MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
face,
1.2,
10,
Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
new Size(20, 20));
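//(parameters: 1.2 = scale step between detection passes, 10 = minimum neighbouring hits to
//keep a detection, Canny pruning skips low-edge regions, 20x20 = smallest face size considered)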
//Action for each element detected
foreach (MCvAvgComp f in facesDetected[0])
{
t = t + 1;
result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
//Draw a red rectangle around the detected face on the colour frame
currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);
if (trainingImages.ToArray().Length != 0)
{
//Termination criteria for face recognition: the number of trained images is used as the max iteration count
MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
//Eigen face recognizer
EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
trainingImages.ToArray(),
labels.ToArray(),
3000,
ref termCrit);
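//(3000 is the eigen-distance threshold: when the closest training image is farther away than
//this, Recognize returns an empty string, so unknown faces come back unlabelled)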
name = recognizer.Recognize(result);
//Draw the label for each face detected and recognized
currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
}
NamePersons[t-1] = name;
NamePersons.Add("");
//Set the number of faces detected on the scene
label3.Text = facesDetected[0].Length.ToString();
/*
//Set the region of interest on the faces
gray.ROI = f.rect;
MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
eye,
1.1,
10,
Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
new Size(20, 20));
gray.ROI = Rectangle.Empty;
foreach (MCvAvgComp ey in eyesDetected[0])
{
Rectangle eyeRect = ey.rect;
eyeRect.Offset(f.rect.X, f.rect.Y);
currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
}
*/
}
t = 0;
//Concatenate the names of the persons recognized
for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
{
names = names + NamePersons[nnn] + ", ";
}
//Show the processed frame and the recognized faces
imageBoxFrameGrabber.Image = currentFrame;
label4.Text = names;
//Drive the motor over the serial port ("off" when no face is detected, "on" otherwise) and log the recognized names
if (String.IsNullOrEmpty(names))
{
textBox2.Text = "off";
string m = comboBox1.Text.ToString();
string s = textBox2.Text.ToString();
sErial(m, s);
}
else
{
File.AppendAllText(Application.StartupPath + "/RecognitionLog/facelog.txt",
names + DateTime.Now.ToString() + Environment.NewLine);
textBox2.Text = "on";
string m = comboBox1.Text.ToString();
string s = textBox2.Text.ToString();
sErial(m, s);
}
//
//This sends the name automatically - this needs to be enabled or disabled
//
tbX.Text = "\"" + names + "\"";
btnSetX.PerformClick(); //sends the information to EZ-Builder
//btngetX.PerformClick(); //gets the information from EZ-Builder - this works, but only until recognition stops
//This clears the name value for the next face to be recognized
names = "";
//Clear the list(vector) of names
NamePersons.Clear();
}
private void FrmPrincipal_Load(object sender, EventArgs e)
{
this.Text = "EZ-Face " + revision;
//cbCamIndex.Items.AddRange(Camera.GetVideoCaptureDevices());
tbLog.Visible = false;
groupBox1.Enabled = false;
loadUserSettingsToolStripMenuItem.PerformClick();
timer1.Start();
timer2.Start();
if (File.Exists(Application.StartupPath + "/RecognitionLog/facelog.txt"))
{
var fileName = (Application.StartupPath + "/RecognitionLog/facelog.txt");
FileInfo fi = new FileInfo(fileName);
var size = fi.Length;
lb_facename_file.Text = "Face Log File size: " + size;
}
if (CamAuto == true)
{
lb_autorun.Text = "Enabled";
button1.PerformClick();
}
else
{
lb_autorun.Text = "Disabled";
}
if (ConnectAuto == true)
{
lb_autoconnect.Text = "Enabled";
btnConnect.PerformClick();
}
else
{
lb_autoconnect.Text = "Disabled";
}
}
private void Log(object txt, params object[] vals)
{
tbLog.AppendText(string.Format(txt.ToString(), vals));
tbLog.AppendText(Environment.NewLine);
}
private void btnConnect_Click(object sender, EventArgs e)
{
tbLog.Visible = true;
try
{
if (_tcpClient != null)
disconnect();
else
connect();
}
catch (Exception ex)
{
Log("Error performing connection action: {0}", ex);
}
}
private void disconnect()
{
if (_tcpClient != null)
_tcpClient.Close();
_tcpClient = null;
btnConnect.Text = "Connect";
Log("Disconnected");
tbLog.Visible = false;
}
private void connect()
{
int port = Convert.ToInt32(tbPort.Text);
Log("Attempting Connection to {0}:{1}", tbAddress.Text, port);
_tcpClient = new TcpClient();
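//Connect asynchronously and give up after three seconds so a bad address cannot hang the UI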
IAsyncResult ar = _tcpClient.BeginConnect(tbAddress.Text, port, null, null);
System.Threading.WaitHandle wh = ar.AsyncWaitHandle;
try
{
if (!ar.AsyncWaitHandle.WaitOne(TimeSpan.FromSeconds(3), false))
{
_tcpClient.Close();
throw new TimeoutException();
}
_tcpClient.EndConnect(ar);
}
finally
{
wh.Close();
}
_tcpClient.NoDelay = true;
_tcpClient.ReceiveTimeout = 2000;
_tcpClient.SendTimeout = 2000;
btnConnect.Text = "Disconnect";
Log("Connected");
Log(readResponseLine());
}
private string sendCommand(string cmd)
{
try
{
Log("Sending: {0}", cmd);
clearInputBuffer();
_tcpClient.Client.Send(System.Text.Encoding.ASCII.GetBytes(cmd + Environment.NewLine));
return readResponseLine(); //the original example used Log(readResponseLine()) here
}
catch (Exception ex)
{
Log("Command Error: {0}", ex);
disconnect();
}
return string.Empty;
}
/// <summary>
/// Clears any data in the tcp incoming buffer by reading the buffer into an empty byte array.
/// </summary>
private void clearInputBuffer()
{
if (_tcpClient.Available > 0)
_tcpClient.GetStream().Read(new byte[_tcpClient.Available], 0, _tcpClient.Available);
}
/// <summary>
/// Blocks and waits for a string of data to be sent. The string is terminated with a \r\n
/// </summary>
private string readResponseLine()
{
string str = string.Empty;
do
{
byte[] tmpBuffer = new byte[1024];
//Only decode the bytes that were actually received
int bytesRead = _tcpClient.GetStream().Read(tmpBuffer, 0, tmpBuffer.Length);
str += System.Text.Encoding.ASCII.GetString(tmpBuffer, 0, bytesRead);
} while (!str.Contains(Environment.NewLine));
// Return only the first line if multiple lines were received
return str.Substring(0, str.IndexOf(Environment.NewLine));
}
private void btnSetX_Click(object sender, EventArgs e)
{
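//Assigns the recognized name(s) to an EZ-Builder script variable. tbX already holds the names
//wrapped in quotes (set in FrameGrabber), so the command sent is of the form $FaceName = "name".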
sendCommand(string.Format("$FaceName = {0}", tbX.Text));
}
void sErial(string Port_name, string data_Send)
{
SerialPort sp = new SerialPort(Port_name, 9600, Parity.None, 8, StopBits.One);
sp.Open();
sp.Write(data_Send);
sp.Close();
}
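//sErial above opens and closes the COM port on every call, and FrameGrabber calls it for every
//processed frame. A minimal alternative sketch (an assumption, not part of the original app):
//keep one SerialPort open and reuse it, which avoids the per-frame open/close overhead.
private SerialPort _persistentPort; //hypothetical field used only by the sketch below
void sErialPersistent(string Port_name, string data_Send)
{
//(Re)open the port only when it is not open yet or the selected port name has changed
if (_persistentPort == null || !_persistentPort.IsOpen || _persistentPort.PortName != Port_name)
{
if (_persistentPort != null && _persistentPort.IsOpen)
_persistentPort.Close();
_persistentPort = new SerialPort(Port_name, 9600, Parity.None, 8, StopBits.One);
_persistentPort.Open();
}
_persistentPort.Write(data_Send);
}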
private void btn_stop_capture_Click(object sender, EventArgs e)
{
if (CapturingProcess == true)
{
Application.Idle -= FrameGrabber;
grabber.Dispose();
//Application.Exit(); //this closes the application
//CapturingProcess = false;
//playorpause.Text = "Play";
}
else
{
Application.Idle += FrameGrabber;
//CapturingProcess = true;
//playorpause.Text = "Pause";
}
button1.Enabled = true;
btn_stop_capture.Enabled = false;
groupBox1.Enabled = false;
imageBoxFrameGrabber.Image = null; //sets the image to blank
imageBox1.Image = null; //sets image to blank
}
private void deleteLearnedFacesToolStripMenuItem_Click(object sender, EventArgs e)
{
DialogResult d = MessageBox.Show("Are you sure you want to delete all learned faces?", "Question", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
if (d == DialogResult.Yes)
{
if (CapRunning)
{
btn_stop_capture.PerformClick();
}
Array.ForEach(Directory.GetFiles(Application.StartupPath + "/TrainedFaces"), File.Delete);
button1.Enabled = false;
DialogResult b = MessageBox.Show("You must close EZ-Face and re-open it for changes to take effect.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
if (b == DialogResult.OK)
{
Application.Exit();
}
}
else if (d == DialogResult.No)
{
//Do nothing
}
}
private void aboutToolStripMenuItem1_Click(object sender, EventArgs e)
{
var aboutForm = new About();
aboutForm.revision = revision;
aboutForm.Show();
}
private void j2RScientificComToolStripMenuItem_Click(object sender, EventArgs e)
{
System.Diagnostics.Process.Start("http://www.J2RScientific.com");
}
private void instructionsToolStripMenuItem_Click(object sender, EventArgs e)
{
if (File.Exists(@"C:\BotBrain\EZ-Face\Resources\ReadMe.txt"))
{
System.Diagnostics.Process.Start(@"C:\BotBrain\EZ-Face\Resources\ReadMe.txt");
}
else
{
MessageBox.Show("I'm sorry. The ReadMe.txt file could not be found.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
}
private void deleteLogOfDetectedFacesToolStripMenuItem_Click(object sender, EventArgs e)
{
DialogResult f = MessageBox.Show("Are you sure you want to delete facelog.txt file?", "Question", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
if (f == DialogResult.Yes)
{
if (File.Exists(Application.StartupPath + "/RecognitionLog/facelog.txt"))
{
File.Delete(Application.StartupPath + "/RecognitionLog/facelog.txt");
}
}
}
private void viewSavedFacesToolStripMenuItem_Click(object sender, EventArgs e)
{
Process.Start(Application.StartupPath + "/TrainedFaces");
}
private void viewLogOfDetectedFacesToolStripMenuItem_Click(object sender, EventArgs e)
{
if (File.Exists(Application.StartupPath + "/RecognitionLog/facelog.txt"))
{
System.Diagnostics.Process.Start(Application.StartupPath + "/RecognitionLog/facelog.txt");
}
else
{
MessageBox.Show("I'm sorry. The facelog.txt file could not be found.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
}
private void saveSettingsToolStripMenuItem_Click(object sender, EventArgs e)
{
string httpAddress = tbAddress.Text;
string portAddress = tbPort.Text;
if (File.Exists(Application.StartupPath + "/Settings/user_settings.txt"))
{
File.Delete(Application.StartupPath + "/Settings/user_settings.txt");
}
File.AppendAllText(Application.StartupPath + "/Settings/user_settings.txt", httpAddress.ToString() + Environment.NewLine);
File.AppendAllText(Application.StartupPath + "/Settings/user_settings.txt", portAddress.ToString() + Environment.NewLine);
File.AppendAllText(Application.StartupPath + "/Settings/user_settings.txt", _CameraIndex.ToString() + Environment.NewLine);
File.AppendAllText(Application.StartupPath + "/Settings/user_settings.txt", CamAuto.ToString() + Environment.NewLine);
File.AppendAllText(Application.StartupPath + "/Settings/user_settings.txt", ConnectAuto.ToString() + Environment.NewLine);
MessageBox.Show("Your user settings were saved.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
private void loadUserSettingsToolStripMenuItem_Click(object sender, EventArgs e)
{
if (File.Exists(Application.StartupPath + "/Settings/user_settings.txt"))
{
string[] lines = System.IO.File.ReadAllLines(Application.StartupPath + "/Settings/user_settings.txt");
//user_settings.txt stores one value per line: address, port, camera index, camera auto-run, auto-connect
if (lines.Length >= 5)
{
tbAddress.Text = lines[0];
tbPort.Text = lines[1];
_CameraIndex = Int32.Parse(lines[2]);
CamAuto = bool.Parse(lines[3]);
ConnectAuto = bool.Parse(lines[4]);
}
}
else
{
MessageBox.Show("I'm sorry. The user_settings.txt file could not be found.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
}
private void timer1_Tick(object sender, EventArgs e)
{
if (File.Exists(Application.StartupPath + "/RecognitionLog/facelog.txt"))
{
var fileName = (Application.StartupPath + "/RecognitionLog/facelog.txt");
FileInfo fi = new FileInfo(fileName);
var size = fi.Length;
lb_facename_file.Text = "Face Log File size: " + size;
if (size > 1000000) //if the file is larger than 1 MB it will be deleted
{
File.Delete(Application.StartupPath + "/RecognitionLog/facelog.txt");
}
}
}
private void cbCamIndex_SelectedIndexChanged(object sender, EventArgs e)
{
//-> Get the selected item in the combobox
KeyValuePair<int, string> SelectedItem = (KeyValuePair<int, string>)cbCamIndex.SelectedItem;
//-> Assign selected cam index to defined var
_CameraIndex = SelectedItem.Key;
}
private void btn_refresh_camerlist_Click(object sender, EventArgs e)
{
//-> Create a List to store for ComboCameras
List<KeyValuePair<int, string>> ListCamerasData = new List<KeyValuePair<int, string>>();
//-> Find systems cameras with DirectShow.Net dll
DsDevice[] _SystemCamereas = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
int _DeviceIndex = 0;
foreach (DirectShowLib.DsDevice _Camera in _SystemCamereas)
{
ListCamerasData.Add(new KeyValuePair<int, string>(_DeviceIndex, _Camera.Name));
_DeviceIndex++;
}
//-> clear the combobox
cbCamIndex.DataSource = null;
cbCamIndex.Items.Clear();
//-> bind the combobox
cbCamIndex.DataSource = new BindingSource(ListCamerasData, null);
cbCamIndex.DisplayMember = "Value";
cbCamIndex.ValueMember = "Key";
//DirectShowLib-2005 must be referenced by the project (the DLL is kept in the bin folder)
}
private void setCameraToAutoRunToolStripMenuItem_Click(object sender, EventArgs e)
{
CamAuto = true;
lb_autorun.Text = "Enabled";
MessageBox.Show("Remember, please make sure all user settings are set to the correct values - then use the File/Save User Settings feature.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
private void removeCameraAutoRunToolStripMenuItem_Click(object sender, EventArgs e)
{
CamAuto = false;
lb_autorun.Text = "Disabled";
MessageBox.Show("Remember, please make sure all user settings are set to the correct values - then use the File/Save User Settings feature.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
private void btngetX_Click(object sender, EventArgs e)
{
string retVal = sendCommand("print($EZfaceCMD)");
tb_getX.Text = retVal;
Log(retVal);
if (retVal == "EZfaceSTOP")
{
if (button1.Enabled == false)
{
btn_stop_capture.PerformClick();
}
}
if (retVal == "EZfaceSTART")
{
if (btn_stop_capture.Enabled == false)
{
button1.PerformClick();
}
}
if (retVal == "EZfaceCLOSE")
{
Application.Exit();
}
}
private void setAutoConnectToolStripMenuItem_Click(object sender, EventArgs e)
{
ConnectAuto = true;
lb_autoconnect.Text = "Enabled";
MessageBox.Show("This settings enables auto communication connection upon the application running. Remember, please make sure all user settings are set to the correct values - then use the File/Save User Settings feature.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
private void removeAutoConnectToolStripMenuItem_Click(object sender, EventArgs e)
{
ConnectAuto = false;
lb_autoconnect.Text = "Disabled";
MessageBox.Show("This settings disables auto communication connection upon the application running. Remember, please make sure all user settings are set to the correct values - then use the File/Save User Settings feature.", "EZ-Face Notice", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
}
private void timer2_Tick(object sender, EventArgs e)
{
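//Poll EZ-Builder for the $EZfaceCMD variable so a script there can start/stop capture or close the app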
btngetX.PerformClick();
}
private void tbPort_TextChanged(object sender, EventArgs e)
{
}
private void tbX_TextChanged(object sender, EventArgs e)
{
}
private void button3_Click(object sender, EventArgs e)
{
comboBox1.Items.Clear(); //avoid duplicate entries when the port list is refreshed
string[] ports = SerialPort.GetPortNames();
foreach (string port in ports)
{
comboBox1.Items.Add(port);
}
}
private void label3_Click(object sender, EventArgs e)
{
}
}
}
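The motor side of this setup only ever receives the two strings "on" and "off" at 9600 baud (see FrameGrabber and sErial above), so the serial link can be bench-tested without the camera. Below is a minimal, self-contained console sketch for that test; the port name "COM3" is only a placeholder for whichever port your motor controller is attached to.

using System;
using System.IO.Ports;
using System.Threading;
class MotorSerialTest
{
static void Main()
{
//"COM3" is a placeholder - pick the same port you would choose in EZ-Face's combo box
using (var sp = new SerialPort("COM3", 9600, Parity.None, 8, StopBits.One))
{
sp.Open();
sp.Write("on"); //the payload EZ-Face sends while a face is in view
Thread.Sleep(2000);
sp.Write("off"); //the payload EZ-Face sends when no face is detected
Console.WriteLine("Sent on/off test commands.");
}
}
}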