Publish more info via ZeroMQ for Greta project use
PhilGauthier committed Feb 24, 2020
1 parent 02f572b commit b3adf29
Showing 1 changed file with 73 additions and 12 deletions.
85 changes: 73 additions & 12 deletions gui/HeadPose-live/MainWindow.xaml.cs
@@ -53,6 +53,8 @@
using ZeroMQ;
using System.Drawing;
using System.Collections.Concurrent;
using FaceAnalyser_Interop;
using System.Globalization;

namespace HeadPoseLive
{
@@ -122,10 +124,36 @@ public static DateTime CurrentTime
volatile bool running = true;
volatile bool pause = false;

#region Phil
public bool DynamicAUModels { get; set; } = true;
int image_output_size = 112;
public bool MaskAligned { get; set; } = true; // Should the aligned images be masked
string aus = "AU01,AU02,AU04,AU05,AU06,AU07,AU09,AU10,AU12,AU14,AU15,AU17,AU20,AU23,AU25,AU26,AU45";
string aus_r;
string aus_c;
CultureInfo cultureInfo = new CultureInfo("en-GB");
#endregion

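// Builds the comma-separated AU intensity (_r) and presence (_c) column names appended to the published HEADER frame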
private void initAus()
{

StringBuilder sbr = new StringBuilder();
StringBuilder sbc = new StringBuilder();
foreach (string c in aus.Split(','))
{
sbr.Append(c);
sbr.Append("_r,");
sbc.Append(c);
sbc.Append("_c,");
}
aus_r = sbr.ToString().Substring(0, sbr.Length - 1);
aus_c = sbc.ToString().Substring(0, sbc.Length - 1);
}

public void StartExperiment()
{
initAus();
// Inquire more from the user

// Get the entry dialogue now for the subject ID
trial_id = 0;
TextEntryWindow subject_id_window = new TextEntryWindow();
@@ -283,10 +311,11 @@ public MainWindow()

}

private bool ProcessFrame(CLNF landmark_detector, GazeAnalyserManaged gaze_analyser, FaceModelParameters model_params, RawImage frame, RawImage grayscale_frame, float fx, float fy, float cx, float cy)
private bool ProcessFrame(CLNF landmark_detector, GazeAnalyserManaged gaze_analyser, FaceAnalyserManaged face_analyser, FaceModelParameters model_params, RawImage frame, RawImage grayscale_frame, float fx, float fy, float cx, float cy)
{
bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(frame, model_params, grayscale_frame);
gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, fx, fy, cx, cy);
face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, false);
return detection_succeeding;

}
@@ -367,6 +396,7 @@ private void RecordingLoop()
// Capturing and processing the video frame by frame
private void VideoLoop(UtilitiesOF.SequenceReader reader)
{
int frameId = 0;
Thread.CurrentThread.IsBackground = true;

String root = AppDomain.CurrentDomain.BaseDirectory;
@@ -383,11 +413,14 @@ private void VideoLoop(UtilitiesOF.SequenceReader reader)

CLNF face_model = new CLNF(model_params);
GazeAnalyserManaged gaze_analyser = new GazeAnalyserManaged();
FaceAnalyserManaged face_analyser = new FaceAnalyserManaged
(AppDomain.CurrentDomain.BaseDirectory, DynamicAUModels, image_output_size, MaskAligned);

DateTime? startTime = CurrentTime;

var lastFrameTime = CurrentTime;

Stopwatch stopWatch = new Stopwatch();
stopWatch.Start();
while (running)
{

@@ -396,7 +429,7 @@ private void VideoLoop(UtilitiesOF.SequenceReader reader)
//////////////////////////////////////////////

RawImage frame = reader.GetNextImage();

lastFrameTime = CurrentTime;
processing_fps.AddFrame();

@@ -408,7 +441,7 @@ private void VideoLoop(UtilitiesOF.SequenceReader reader)
grayFrame.Mirror();
}

bool detectionSucceeding = ProcessFrame(face_model, gaze_analyser, model_params, frame, grayFrame, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
bool detectionSucceeding = ProcessFrame(face_model, gaze_analyser, face_analyser, model_params, frame, grayFrame, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

lock (recording_lock)
{
@@ -429,6 +462,7 @@ private void VideoLoop(UtilitiesOF.SequenceReader reader)
Tuple<float, float> gaze_angle = new Tuple<float, float>(0, 0);
var visibilities = face_model.GetVisibilities();
double scale = face_model.GetRigidParams()[0];


if (detectionSucceeding)
{
@@ -448,14 +482,18 @@ private void VideoLoop(UtilitiesOF.SequenceReader reader)
if (reset)
{
face_model.Reset();

reset = false;
frameId = 0;
}

// Visualisation updating
try
{
Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
{
Thread.CurrentThread.CurrentCulture = cultureInfo;
Thread.CurrentThread.CurrentUICulture = cultureInfo;
if (latest_img == null)
latest_img = frame.CreateWriteableBitmap();
@@ -544,21 +582,43 @@ private void VideoLoop(UtilitiesOF.SequenceReader reader)
{
eye_landmark_points.Add(new System.Windows.Point(p.Item1, p.Item2));
}
webcam_img.OverlayEyePoints.Add(eye_landmark_points);
webcam_img.GazeLines.Add(gaze_lines);
// Publish the information for other applications
String str_head_pose = String.Format("{0}:{1:F2}, {2:F2}, {3:F2}, {4:F2}, {5:F2}, {6:F2}", "HeadPose", pose[0], pose[1], pose[2],
if (frameId % 100 == 0)
zero_mq_socket.Send(new ZFrame(
String.Format("HEADER:frame_id,face_id,timestamp,confidence,success,head_x,head_y,head_z,head_rx,head_ry,head_rz,gaze_0_x,gaze_0_y,gaze_0_z,gaze_1_x,gaze_1_y,gaze_1_z,gaze_angle_x,gaze_angle_y,{0},{1}",
aus_r, aus_c), Encoding.UTF8));
String fid = String.Format("{0},-1,{1:F2},{2:F2},{3}", frameId, stopWatch.Elapsed.TotalSeconds, confidence, detectionSucceeding ? "1" : "0");
String str_head_pose = String.Format(",{0:F2},{1:F2},{2:F2},{3:F2},{4:F2},{5:F2}", pose[0], pose[1], pose[2],
pose[3] * 180 / Math.PI, pose[4] * 180 / Math.PI, pose[5] * 180 / Math.PI);
zero_mq_socket.Send(new ZFrame(str_head_pose, Encoding.UTF8));
String str_gaze = String.Format("{0}:{1:F2}, {2:F2}", "GazeAngle", gaze_angle.Item1 * (180.0 / Math.PI), gaze_angle.Item2 * (180.0 / Math.PI));
var gazeCams = gaze_analyser.GetGazeCamera();
String str_gazePos1 = String.Format(",{0:F2},{1:F2},{2:F2}", gazeCams.Item1.Item1, gazeCams.Item1.Item2, gazeCams.Item1.Item3);
String str_gazePos2 = String.Format(",{0:F2},{1:F2},{2:F2}", gazeCams.Item2.Item1, gazeCams.Item2.Item2, gazeCams.Item2.Item3);
String str_gaze = String.Format(",{0:F2},{1:F2}", gaze_angle.Item1 * (180.0 / Math.PI), gaze_angle.Item2 * (180.0 / Math.PI));
StringBuilder strBuilder = new StringBuilder();
strBuilder.Append("DATA:");
strBuilder.Append(fid);
strBuilder.Append(str_head_pose);
strBuilder.Append(str_gazePos1);
strBuilder.Append(str_gazePos2);
strBuilder.Append(str_gaze);
Dictionary<string, double> dic = face_analyser.GetCurrentAUsReg();
foreach (string key in aus.Split(','))
{
strBuilder.AppendFormat(",{0:F2}", dic[key]);
}
dic = face_analyser.GetCurrentAUsClass();
foreach (string key in aus.Split(','))
{
strBuilder.AppendFormat(",{0:F2}", dic[key]);
}
zero_mq_socket.Send(new ZFrame(str_gaze, Encoding.UTF8));
zero_mq_socket.Send(new ZFrame(strBuilder.ToString(), Encoding.UTF8));
}
frameId++;
}));

while (running & pause)
@@ -574,6 +634,7 @@ private void VideoLoop(UtilitiesOF.SequenceReader reader)
break;
}
}
stopWatch.Stop();
reader.Close();
System.Console.Out.WriteLine("Thread finished");
}

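For reference, the new publishing format pairs a HEADER frame (re-sent every 100 frames) listing the column names with per-frame DATA frames carrying the matching comma-separated values. Below is a minimal Greta-side subscriber sketch; it assumes the same clrzmq4 ZeroMQ binding referenced by the using ZeroMQ directive above, and the endpoint string and class name are placeholders, since the publisher's actual bind address is configured elsewhere in MainWindow.xaml.cs.

using System;
using System.Text;
using ZeroMQ;

class GretaFeedSubscriber
{
    static void Main()
    {
        using (var context = new ZContext())
        using (var subscriber = new ZSocket(context, ZSocketType.SUB))
        {
            // Placeholder endpoint: match the address the HeadPose-live publisher binds to
            subscriber.Connect("tcp://127.0.0.1:5000");
            subscriber.SubscribeAll();

            string[] columns = null;
            while (true)
            {
                using (ZFrame frame = subscriber.ReceiveFrame())
                {
                    string msg = frame.ReadString(Encoding.UTF8);
                    if (msg.StartsWith("HEADER:"))
                    {
                        // Column names for the DATA frames that follow
                        columns = msg.Substring("HEADER:".Length).Split(',');
                    }
                    else if (msg.StartsWith("DATA:") && columns != null)
                    {
                        string[] values = msg.Substring("DATA:".Length).Split(',');
                        for (int i = 0; i < columns.Length && i < values.Length; i++)
                        {
                            Console.WriteLine("{0} = {1}", columns[i], values[i]);
                        }
                    }
                }
            }
        }
    }
}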