Browse Source

Merge branch 'master' of https://git.tk.informatik.tu-darmstadt.de/etri-smartspaces

Anton Rohr 10 years ago
parent
commit
dafa80ae19

+ 80 - 0
bbiwarg/BBWIWARG.cs

@@ -15,24 +15,79 @@ using bbiwarg.TUIO;
 
 namespace bbiwarg
 {
+    /// <summary>
+    /// Main class which creates and starts the input, tuio and window objects.
+    /// </summary>
     class BBIWARG
     {
+        /// <summary>
+        /// the input provider
+        /// </summary>
         private InputProvider inputProvider;
+
+        /// <summary>
+        /// the input handler
+        /// </summary>
         private InputHandler inputHandler;
 
+
+        /// <summary>
+        /// true iff the tuio server is enabled
+        /// </summary>
         private bool tuioEnabled;
+
+        /// <summary>
+        /// the ip address for the tuio server
+        /// </summary>
         private String tuioIP;
+
+        /// <summary>
+        /// the port for the tuio server
+        /// </summary>
         private Int16 tuioPort;
+
+        /// <summary>
+        /// the tuio communicator
+        /// </summary>
         private TuioCommunicator tuioCommunicator;
 
+
+        /// <summary>
+        /// true iff the debug window is enabled
+        /// </summary>
         private bool debugWindowEnabled;
+
+        /// <summary>
+        /// the debug window
+        /// </summary>
         private DebugWindow debugWindow;
+
+        /// <summary>
+        /// the thread the debug window runs in
+        /// </summary>
         private Thread debugWindowThread;
 
+
+        /// <summary>
+        /// true iff the glasses window is enabled
+        /// </summary>
         private bool glassesWindowEnabled;
+
+        /// <summary>
+        /// the thread the glasses window runs in
+        /// </summary>
         private Thread glassesWindowThread;
+
+        /// <summary>
+        /// the glasses window
+        /// </summary>
         private GlassesWindow glassesWindow;
 
+
+        /// <summary>
+        /// Starts the program.
+        /// </summary>
+        /// <param name="args">command line arguments</param>
         static void Main(string[] args)
         {
             Console.SetWindowSize(Parameters.ConsoleWidth, Parameters.ConsoleHeight);
@@ -41,6 +96,10 @@ namespace bbiwarg
             program.run();
         }
 
+        /// <summary>
+        /// Parses the command line arguments and depending on them creates the input, tuio and window objects.
+        /// </summary>
+        /// <param name="args">command line arguments</param>
         public BBIWARG(string[] args)
         {
             handleArgs(args);
@@ -73,11 +132,17 @@ namespace bbiwarg
 
         }
 
+        /// <summary>
+        /// Runs the main program.
+        /// </summary>
         public void run()
         {
             inputProvider.start();
         }
 
+        /// <summary>
+        /// Creates the input provider.
+        /// </summary>
         private void createInputProvider()
         {
             if (Parameters.InputSource == InputType.Movie)
@@ -86,18 +151,29 @@ namespace bbiwarg
                 inputProvider = new InputProvider();
         }
 
+        /// <summary>
+        /// Runs the debug window in its own thread.
+        /// </summary>
         private void debugWindowThreadStart()
         {
             debugWindow = new DebugWindow(inputProvider, inputHandler, Parameters.DebugWindowTitle, Parameters.DebugWindowUpdateIntervall);
             Application.Run(debugWindow);
         }
 
+        /// <summary>
+        /// Runs the glasses window in its own thread.
+        /// </summary>
         private void glassesWindowThreadStart()
         {
             glassesWindow = new GlassesWindow(inputProvider, inputHandler, Parameters.GlassesWindowTitle, Screen.AllScreens[1], Parameters.GlassesWindowUpdateInterval);
             Application.Run(glassesWindow);
         }
 
+        /// <summary>
+        /// Handles the event that the device has started by starting the enabled windows.
+        /// </summary>
+        /// <param name="sender">event sender</param>
+        /// <param name="e">event arguments</param>
         private void handleDeviceStartedEvent(object sender, EventArgs e)
         {
             if (debugWindowEnabled)
@@ -107,6 +183,10 @@ namespace bbiwarg
                 glassesWindowThread.Start();
         }
 
+        /// <summary>
+        /// Parses the command line arguments and sets parameters depending on them.
+        /// </summary>
+        /// <param name="args">command line arguments</param>
         private void handleArgs(String[] args)
         {
             if (args.Length > 0)

+ 3 - 1
bbiwarg/Images/EdgeImage.cs

@@ -33,7 +33,9 @@ namespace bbiwarg.Images
         public EdgeImage(DepthImage depthImage)
         {
             Size = depthImage.Size;
-            Image = depthImage.Image.ConvertScale<byte>(255f / (depthImage.MaxDepth - depthImage.MinDepth), 0).Canny(Parameters.EdgeImageCannyStartThreshold, Parameters.EdgeImageCannyLinkingThreshold, Parameters.EdgeImageCannySize).ThresholdBinary(new Gray(0), new Gray(1));
+            Image = depthImage.Image.ConvertScale<byte>(255f / (depthImage.MaxDepth - depthImage.MinDepth), 0).
+                Canny(Parameters.EdgeImageCannyStartThreshold, Parameters.EdgeImageCannyLinkingThreshold, Parameters.EdgeImageCannySize).
+                ThresholdBinary(new Gray(0), new Gray(1));
             RoughImage = Image.Dilate(Parameters.EdgeImageRoughNumDilationIterations);
         }
 

+ 1 - 1
bbiwarg/Output/DebugOutput/TouchEventVisualizer.cs

@@ -17,7 +17,7 @@ namespace bbiwarg.Output.DebugOutput
     class TouchEventVisualizer
     {
         /// <summary>
-        /// used to prevent the simultaneous access to <see cref="handleNewFrameData"/> and <see cref="getOutputImage"/> from different threads
+        /// used to prevent running <see cref="handleNewFrameData"/> and <see cref="getOutputImage"/> simultaneously from different threads
         /// </summary>
         private Object sync;
 

+ 503 - 28
bbiwarg/Parameters.cs

@@ -10,59 +10,192 @@ using bbiwarg.Utility;
 
 namespace bbiwarg
 {
+    /// <summary>
+    /// type of input source
+    /// </summary>
     public enum InputType
     {
         Camera,
         Movie
     }
 
-    class Parameters
+    /// <summary>
+    /// Defines all parameters used in the whole program.
+    /// </summary>
+    static class Parameters
     {
-        // console
+        #region console
+        
+        /// <summary>
+        /// width of the console in monospace characters
+        /// </summary>
         public static readonly int ConsoleWidth = 90;
+
+        /// <summary>
+        /// height of the console in monospace characters
+        /// </summary>
         public static readonly int ConsoleHeight = 30;
+        
+        #endregion
+
+
+        #region input
+        
+        /// <summary>
+        /// the input source type
+        /// </summary>
+        public static readonly InputType InputSource = InputType.Camera;
 
-        // input
-        public static readonly InputType InputSource = InputType.Movie;
+        /// <summary>
+        /// path to the movie file used as input source
+        /// </summary>
         public static readonly String InputMoviePath = "..\\..\\videos\\touch\\4.skv";
 
-        // Logger
+        #endregion
+
+
+        #region Logger
+
+        /// <summary>
+        /// true iff the timer output should be shown
+        /// </summary>
         public static readonly bool LoggerTimerOutputEnabled = true;
+
+        /// <summary>
+        /// bitfield which specifies which subjects should be logged
+        /// </summary>
         public static readonly LogSubject LoggerEnabledSubjects = LogSubject.None;
 
-        // Debug window
+        #endregion
+
+
+        #region DebugWindow
+
+        /// <summary>
+        /// true iff the debug window is enabled
+        /// </summary>
         public static readonly bool DebugWindowEnabled = true;
+
+        /// <summary>
+        /// the update interval for the debug window
+        /// </summary>
         public static readonly int DebugWindowUpdateIntervall = 1000 / 30; // 30fps
+
+        /// <summary>
+        /// the title of the debug window
+        /// </summary>
         public static readonly String DebugWindowTitle = "BBIWARG - DebugOutput";
 
-        // glasses window
+        #endregion
+
+
+        #region GlassesWindow
+
+        /// <summary>
+        /// true iff the glasses window is enabled
+        /// </summary>
         public static readonly bool GlassesWindowEnabled = false;
+
+        /// <summary>
+        /// the update interval for the glasses window
+        /// </summary>
         public static readonly int GlassesWindowUpdateInterval = 1000 / 30; // 30fps
-        public static readonly String GlassesWindowTitle = "BBIWARG - GlassesOuty   put";
+
+        /// <summary>
+        /// the title of the glasses window
+        /// </summary>
+        public static readonly String GlassesWindowTitle = "BBIWARG - GlassesOutput";
+
+        /// <summary>
+        /// number of calibration points
+        /// </summary>
         public static readonly int GlassesWindowNumCalibrationPoints = 20;
 
-        // TUIO
+        #endregion
+
+
+        #region tuio
+
+        /// <summary>
+        /// true iff the tuio server is enabled
+        /// </summary>
         public static readonly bool TuioEnabledByDefault = true;
+
+        /// <summary>
+        /// the default ip address of the tuio server
+        /// </summary>
         public static readonly String TuioDefaultIP = "127.0.0.1";
+
+        /// <summary>
+        /// the default port of the tuio server
+        /// </summary>
         public static readonly Int16 TuioDefaultPort = 3333;
 
-        // confidence image
+        #endregion
+
+
+        #region ConfidenceImage
+
+        /// <summary>
+        /// the minimum confidence threshold for pixel values to be considered correct
+        /// </summary>
         public static readonly int ConfidenceImageMinThreshold = 500;
 
-        // depth image
+        #endregion
+
+
+        #region DepthImage
+
+        /// <summary>
+        /// the size of the median filter used to filter the depth image
+        /// </summary>
         public static readonly int DepthImageMedianSize = 5;
+
+        /// <summary>
+        /// the depth range which is considered important (in mm)
+        /// </summary>
         public static readonly int DepthImageDepthRange = 200; // <255
 
-        // edge image
+        #endregion
+
+
+        #region EdgeImage
+
+        /// <summary>
+        /// start threshold for the canny edge detector used to detect edges in the depth image
+        /// </summary>
         public static readonly int EdgeImageCannyStartThreshold = 80;
+
+        /// <summary>
+        /// linking threshold for the canny edge detector used to detect edges in the depth image 
+        /// </summary>
         public static readonly int EdgeImageCannyLinkingThreshold = 60;
+
+        /// <summary>
+        /// filter size for the canny edge detector used to detect edges in the depth image  
+        /// </summary>
         public static readonly int EdgeImageCannySize = 3;
+
+        /// <summary>
+        /// number of dilation iterations to generate the rough edge image from the edge image
+        /// </summary>
         public static readonly int EdgeImageRoughNumDilationIterations = 1;
 
-        // general tracking
+        #endregion
+
+
+        #region general tracking
+        
+        /// <summary>
+        /// if a tracked object moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float TrackerMaxRelativeMove = 0.25f;
 
-        // finger detection
+        #endregion
+
+
+        #region finger detection
+
         public static readonly int FingerStepSize = 1;
         public static readonly int FingerMaxGapCounter = 3;
         public static readonly int FingerMaxSliceDifferencePerStep = 5;
@@ -76,23 +209,86 @@ namespace bbiwarg
         public static readonly int FingerMaxCrippleDifference = 20;
         public static readonly int FingerContourMargin = 4;
 
-        // finger tracking
+        #endregion
+
+
+        #region finger tracking
+
+        /// <summary>
+        /// number of frames a finger needs to be detected before it is tracked
+        /// </summary>
         public static readonly int FingerTrackerNumFramesDetectedUntilTracked = 5;
+
+        /// <summary>
+        /// number of frames a finger needs to be lost before it is deleted
+        /// </summary>
         public static readonly int FingerTrackerNumFramesLostUntilDeleted = 10;
+
+        /// <summary>
+        /// xx entry for the measurement noise covariance matrix for the kalman filter used to smooth the finger hand and tip points
+        /// </summary>
         public static readonly float FingermXX = 0.000005f;
+
+        /// <summary>
+        /// xy and yx entry for the measurement noise covariance matrix for the kalman filter used to smooth the finger hand and tip points
+        /// </summary>
         public static readonly float FingermXY = 0.0f;
+
+        /// <summary>
+        /// yy entry for the measurement noise covariance matrix for the kalman filter used to smooth the finger hand and tip points
+        /// </summary>
         public static readonly float FingermYY = 0.000005f;
+
+        /// <summary>
+        /// number of finger slice directions used to compute the mean finger direction
+        /// </summary>
         public static readonly int FingerTrackerNumDirectionsForMeanDirection = 10;
+
+        /// <summary>
+        /// if the tip point of a finger moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float FingerTrackerMaxTipPointRelativeMove = TrackerMaxRelativeMove;
+
+        /// <summary>
+        /// if the hand point of a finger moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float FingerTrackerMaxHandPointRelativeMove = TrackerMaxRelativeMove;
 
-        // hand detection
+        #endregion
+
+
+        #region hand detection
+        
+        /// <summary>
+        /// number of colors used to draw hands
+        /// </summary>
         public static readonly int HandNumColors = 3;
+
+        /// <summary>
+        /// maximum downwards depth difference between a pixel and a neighboring pixel belonging to the same hand
+        /// </summary>
         public static readonly int HandFloodFillDownDiff = 2;
+
+        /// <summary>
+        /// maximum upwards depth difference between a pixel and a neighboring pixel belonging to the same hand
+        /// </summary>
         public static readonly int HandFloodFillUpDiff = 2;
+
+        /// <summary>
+        /// maximum size of a hand relative to the whole image
+        /// </summary>
         public static readonly float HandMaxSize = 0.6f;
+
+        /// <summary>
+        /// minimum size of a hand relative to the whole image
+        /// </summary>
         public static readonly float HandMinSize = 0.01f;
+
+        /// <summary>
+        /// maximum size of a hand extension mask relative to the whole image
+        /// </summary>
         public static readonly float HandExtensionMaxRelativeSize = 0.5f * HandMaxSize;
+
         public static readonly int HandExtendMaxDifference = 40;
         public static readonly float HandThumbDefectMaxDistanceToThumb = Parameters.FingerMaxWidth2D;
         public static readonly float HandThumbDefectMinThumbShortLengthRatio = 0.75f;
@@ -100,34 +296,126 @@ namespace bbiwarg
         public static readonly float HandThumbDefectMinShortLongLengthRatio = 0.3f;
         public static readonly float HandThumbDefectMaxShortLongLengthRatio = 0.7f;
 
-        // hand tracker
+        #endregion
+
+
+        #region hand tracker
+
+        /// <summary>
+        /// number of frames a hand needs to be detected before it is tracked
+        /// </summary>
         public static readonly int HandTrackerNumFramesDetectedUntilTracked = 5;
+
+        /// <summary>
+        /// number of frames a hand needs to be lost before it is deleted
+        /// </summary>
         public static readonly int HandTrackerNumFramesLostUntilDeleted = 5;
+
+        /// <summary>
+        /// if the centroid of a hand moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float HandTrackerMaxCentroidRelativeMove = TrackerMaxRelativeMove;
+
+        /// <summary>
+        /// xx entry for the measurement noise covariance matrix for the kalman filter used to smooth the hand centroid
+        /// </summary>
         public static readonly float HandmXX = 0.0005f;
+
+        /// <summary>
+        /// xy and yx entry for the measurement noise covariance matrix for the kalman filter used to smooth the hand centroid
+        /// </summary>
         public static readonly float HandmXY = 0.0f;
+
+        /// <summary>
+        /// yy entry for the measurement noise covariance matrix for the kalman filter used to smooth the hand centroid
+        /// </summary>
         public static readonly float HandmYY = 0.0005f;
 
-        // palm detection
+        #endregion
+
+
+        #region palm detection
+
+        /// <summary>
+        /// number of positions along the forefinger used as starting points to detect the palm width
+        /// </summary>
         public static readonly int PalmNumPositionsForPalmWidth = 5;
+
+        /// <summary>
+        /// relative tolerance which specifies when a point is considered to be in the palm grid
+        /// </summary>
         public static readonly float PalmInsideTolerance = 0.1f;
 
-        // palm tracker
+        #endregion
+
+
+        #region palm tracker
+
+        /// <summary>
+        /// number of frames a palm needs to be detected before it is tracked
+        /// </summary>
         public static readonly int PalmTrackerNumFramesDetectedUntilTracked = 5;
+
+        /// <summary>
+        /// number of frames a palm needs to be lost before it is deleted
+        /// </summary>
         public static readonly int PalmTrackerNumFramesLostUntilDeleted = 5;
+
+        /// <summary>
+        /// if the upper wrist point of the palm grid moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float PalmTrackerMaxWristUpperRelativeMove = TrackerMaxRelativeMove;
+
+        /// <summary>
+        /// if the lower wrist point of the palm grid moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float PalmTrackerMaxWristLowerRelativeMove = TrackerMaxRelativeMove;
+
+        /// <summary>
+        /// if the upper finger point of the palm grid moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float PalmTrackerMaxFingersUpperRelativeMove = TrackerMaxRelativeMove;
+
+        /// <summary>
+        /// if the lower finger point of the palm grid moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float PalmTrackerMaxFingersLowerRelativeMove = TrackerMaxRelativeMove;
+
+        /// <summary>
+        /// xx entry for the measurement noise covariance matrix for the kalman filter used to smooth the palm grid points
+        /// </summary>
         public static readonly float PalmmXX = 0.00005f;
+
+        /// <summary>
+        /// xy and yx entry for the measurement noise covariance matrix for the kalman filter used to smooth the palm grid points
+        /// </summary>
         public static readonly float PalmmXY = 0.0f;
+
+        /// <summary>
+        /// yy entry for the measurement noise covariance matrix for the kalman filter used to smooth the palm grid points
+        /// </summary>
         public static readonly float PalmmYY = 0.00005f;
 
-        //palm Grid
+        #endregion
+
+
+        #region palm grid
+
+        /// <summary>
+        /// default number of palm grid rows
+        /// </summary>
         public static readonly int PalmGridDefaultNumRows = 3;
+
+        /// <summary>
+        /// default number of palm grid columns
+        /// </summary>
         public static readonly int PalmGridDefaultNumColumns = 4;
 
-        // touch detection
+        #endregion
+
+
+        #region touch
+
         public static readonly float TouchMinTouchValue = 0.3f;
         public static readonly int TouchAreaSize = 30;
         public static readonly int TouchFloodfillDownDiff = 1;
@@ -135,58 +423,245 @@ namespace bbiwarg
         public static readonly int TouchTipInsideFactor = 2;
         public static readonly int TouchTipOutsideFactor = 7;
 
-        // touch tracking
+        #endregion
+
+
+        #region touch tracking
+
+        /// <summary>
+        /// number of frames an object needs to be detected before it is tracked
+        /// </summary>
         public static readonly int TouchTrackerNumFramesDetectedUntilTracked = 1;
+
+        /// <summary>
+        /// number of frames an object needs to be lost before it is deleted
+        /// </summary>
         public static readonly int TouchTrackerNumFramesLostUntilDeleted = 5;
+
+        /// <summary>
+        /// if the absolute position of a tracked touch event moves this relative amount it will have a similarity of 0 to itself at the previous position
+        /// </summary>
         public static readonly float TouchTrackerMaxAbsolutePositionRelativeMove = TrackerMaxRelativeMove;
+
+        /// <summary>
+        /// xx entry for the measurement noise covariance matrix for the kalman filter used to smooth touch events
+        /// </summary>
         public static readonly float TouchmXX = 0.003f;
+
+        /// <summary>
+        /// xy and yx entry for the measurement noise covariance matrix for the kalman filter used to smooth touch events
+        /// </summary>
         public static readonly float TouchmXY = 0.0f;
+
+        /// <summary>
+        /// yy entry for the measurement noise covariance matrix for the kalman filter used to smooth touch events
+        /// </summary>
         public static readonly float TouchmYY = 0.00165f;
+
+        /// <summary>
+        /// value used for all entries in the process noise covariance matrix for the kalman filter used to smooth touch events
+        /// </summary>
         public static readonly float TouchProcessNoise = 3.0e-4f;
 
-        // touchEventVisualizer
+        #endregion
+
+
+        #region TouchEventVisualizer
+
+        /// <summary>
+        /// time in ms after which old touch events are removed from the touch event visualizer
+        /// </summary>
         public static readonly int TouchEventVisualizerFadeOutTime = 1500;
 
-        // homographyExport
+        #endregion
+
+
+        #region homographyExport
+
+        /// <summary>
+        /// file name of the file to which the homography is written
+        /// </summary>
         public static readonly String HomographyFileName = "homography.txt";
 
-        // colors
+        #endregion
+
+
+        #region colors
+
+        #region general
+
+        /// <summary>
+        /// color used to draw detected objects
+        /// </summary>
         public static readonly Color ColorDetected = Color.Turquoise;
+
+        /// <summary>
+        /// color used to draw tracked objects
+        /// </summary>
         public static readonly Color ColorTracked = Color.Yellow;
 
+        #endregion
+
+
+        #region images
+        
+        /// <summary>
+        /// color used to specify which color channels the depth image is drawn to
+        /// </summary>
         public static readonly Color DepthImageColor = Color.White;
+
+        /// <summary>
+        /// color used to specify which color channels the edge image is drawn to
+        /// </summary>
         public static readonly Color EdgeImageColor = Color.Blue;
+
+        /// <summary>
+        /// color used to draw the borders of the output images
+        /// </summary>
         public static readonly Color OutputImageBorderColor = Color.White;
 
+        #endregion
+
+
+        #region finger
+
+        /// <summary>
+        /// color used to draw the finger slices
+        /// </summary>
         public static readonly Color FingerSliceColor = Color.Magenta;
+
+        /// <summary>
+        /// color used to draw the detected fingers
+        /// </summary>
         public static readonly Color FingerDetectedColor = ColorDetected;
+
+        /// <summary>
+        /// color used to draw the tracked fingers
+        /// </summary>
         public static readonly Color FingerTrackedColor = ColorTracked;
+
+        /// <summary>
+        /// color used to draw the finger tip point
+        /// </summary>
         public static readonly Color FingerTipColor = Color.Blue;
+
+        /// <summary>
+        /// color used to draw the finger hand point
+        /// </summary>
         public static readonly Color FingerHandColor = Color.Yellow;
+
+        /// <summary>
+        /// color used to draw the finger contour
+        /// </summary>
         public static readonly Color FingerContourColor = Color.Red;
+
+        /// <summary>
+        /// color used to draw the text for the finger id
+        /// </summary>
         public static readonly Color FingerIDColor = Color.White;
 
+        #endregion
+
+
+        #region touch
+
+        /// <summary>
+        /// color used to draw detected touch events
+        /// </summary>
         public static readonly Color TouchEventDetectedColor = ColorDetected;
+
+        /// <summary>
+        /// color used to draw tracked touch events
+        /// </summary>
         public static readonly Color TouchEventTrackedColor = ColorTracked;
-        public static readonly Color TouchEventAreaMatchedSubtractColor = Color.DarkOrange;
-        public static readonly Color TouchEventAreaNonMatchedSubtractColor = Color.DarkSlateGray;
-        public static readonly Color TouchEventStatusBarColor = Color.Green;
 
+        #endregion
+
+
+        #region TouchEventVisualizer
+
+        /// <summary>
+        /// color used to draw the lines between touch events in the touch event visualizer
+        /// </summary>
         public static readonly Color TouchEventVisualizerLineColor = Color.Yellow;
+
+        /// <summary>
+        /// color used to draw the touch event points in the touch event visualizer
+        /// </summary>
         public static readonly Color TouchEventVisualizerPointColor = Color.Red;
+
+        /// <summary>
+        /// color used to draw the grid in the touch event visualizer
+        /// </summary>
         public static readonly Color TouchEventVisualizerGridColor = Color.White;
+
+        /// <summary>
+        /// color used to draw the text in the touch event visualizer
+        /// </summary>
         public static readonly Color TouchEventVisualizerTextColor = Color.White;
+
+        /// <summary>
+        /// color used to highlight the active block in the touch event visualizer
+        /// </summary>
         public static readonly Color TouchEventVisualizerActiveBlockColor = Color.DarkSlateGray;
 
+        #endregion
+
+
+        #region palm
+
+        /// <summary>
+        /// color used to draw the palm quadrangle
+        /// </summary>
         public static readonly Color PalmQuadColor = Color.Blue;
+
+        /// <summary>
+        /// color used to draw the palm grid in the palm
+        /// </summary>
         public static readonly Color PalmGridColor = Color.CornflowerBlue;
 
+        #endregion
+
+
+        #region hand
+
+        /// <summary>
+        /// colors used to draw the hands (ith element is a color which specifies the color channels used to draw the ith hand)
+        /// </summary>
         public static readonly Color[] HandColors = new Color[3] { Color.Red, Color.Blue, Color.Green };
+
+        /// <summary>
+        /// color used to draw the hand centroid
+        /// </summary>
         public static readonly Color HandCentroidColor = Color.Yellow;
+
+        /// <summary>
+        /// color used to draw the hand id text
+        /// </summary>
         public static readonly Color HandIDColor = Color.White;
+
+        /// <summary>
+        /// color used to draw the points of the thumb defects
+        /// </summary>
         public static readonly Color HandThumbDefectPointColor = Color.Lime;
+
+        /// <summary>
+        /// color used to draw the lines of the thumb defects
+        /// </summary>
         public static readonly Color HandThumbDefectLineColor = Color.CornflowerBlue;
 
+        #endregion
+
+
+        #region calibration
+
+        /// <summary>
+        /// color used to draw the calibration points in the glasses window
+        /// </summary>
         public static readonly Color CalibrationPointColor = Color.Yellow;
+
+        #endregion
+
+        #endregion
     }
 }

+ 1 - 1
bbiwarg/Recognition/FingerRecognition/FingerTracker.cs

@@ -10,7 +10,7 @@ using bbiwarg.Input.InputHandling;
 namespace bbiwarg.Recognition.FingerRecognition
 {
     /// <summary>
-    /// Tracks fingers over  multiple frames.
+    /// Keeps track of fingers over a period of time.
     /// </summary>
     class FingerTracker : Tracker<Finger, TrackedFinger>
     {

+ 3 - 3
bbiwarg/Recognition/FingerRecognition/TrackedFinger.cs

@@ -14,12 +14,12 @@ namespace bbiwarg.Recognition.FingerRecognition
     class TrackedFinger : TrackedObject<Finger>
     {
         /// <summary>
-        /// the kalman filter for the tip point
+        /// the kalman filter for the tip point prediction
         /// </summary>
         private Kalman2DPositionFilter tipPointKalman;
 
         /// <summary>
-        /// the kalman filter for the hand point
+        /// the kalman filter for the hand point prediction
         /// </summary>
         private Kalman2DPositionFilter handPointKalman;
 
@@ -64,7 +64,7 @@ namespace bbiwarg.Recognition.FingerRecognition
         }
 
         /// <summary>
-        /// Updates the tracked finger with its best match in the current frame, logs the state change, corrects the finger's direction if needed and updates the position predictions (kalman filters).
+        /// Updates the tracked finger with the given finger, logs the state change, corrects the finger's direction if needed and updates the position predictions (kalman filters).
         /// </summary>
         /// <param name="detectedFinger">The detected finger.</param>
         public override void updateFrame(Finger detectedFinger)

+ 46 - 1
bbiwarg/Recognition/HandRecognition/Hand.cs

@@ -13,13 +13,36 @@ using bbiwarg.Recognition.PalmRecognition;
 
 namespace bbiwarg.Recognition.HandRecognition
 {
+    /// <summary>
+    /// Represents a Hand.
+    /// </summary>
     public class Hand : TrackableObject
     {
+        /// <summary>
+        /// the center of gravity of the hand
+        /// </summary>
         public Vector2D Centroid { get; private set; }
+
+        /// <summary>
+        /// a mask of the hand (0=outside, 1=in hand)
+        /// </summary>
         public Image<Gray, byte> Mask { get; private set; }
+
+        /// <summary>
+        /// the fingers belonging to the hand
+        /// </summary>
         public List<Finger> Fingers { get; private set; }
+
+        /// <summary>
+        /// the palm belonging to the hand
+        /// </summary>
         public Palm Palm { get; set; }
 
+        /// <summary>
+        /// Initializes a new instance of the Hand class.
+        /// </summary>
+        /// <param name="mask">The mask.</param>
+        /// <param name="fingers">The fingers.</param>
         public Hand(Image<Gray, byte> mask, List<Finger> fingers)
         {
             Mask = mask;
@@ -29,11 +52,20 @@ namespace bbiwarg.Recognition.HandRecognition
                 finger.Hand = this;
         }
 
+        /// <summary>
+        /// Checks whether a given point is inside the hand.
+        /// </summary>
+        /// <param name="point">the point</param>
+        /// <returns>whether the point is inside the hand</returns>
         public bool isInside(Vector2D point)
         {
             return (Mask.Data[point.IntY, point.IntX, 0] != 0);
         }
 
+        /// <summary>
+        /// Merges another Hand to this hand by extending the mask and adding the fingers.
+        /// </summary>
+        /// <param name="mergeHand">the other hand</param>
         public void mergeWith(Hand mergeHand)
         {
             extendMask(mergeHand.Mask);
@@ -42,13 +74,23 @@ namespace bbiwarg.Recognition.HandRecognition
                 finger.Hand = this;
         }
 
+        /// <summary>
+        /// Extends the mask.
+        /// </summary>
+        /// <param name="extendMask">the mask of the extension</param>
         public void extendMask(Image<Gray, byte> extendMask)
         {
             Mask = Mask.Or(extendMask);
         }
 
-        public void fillOverlappingFingers(List<Finger> otherFingers, ImageSize imageSize)
+        /// <summary>
+        /// Fills the Hand mask defects caused by overlapping fingers.
+        /// </summary>
+        /// <param name="otherFingers">list of fingers that don't belong to this hand</param>
+        public void fillOverlappingFingers(List<Finger> otherFingers)
         {
+            ImageSize imageSize = new ImageSize(Mask.Width, Mask.Height);
+
             foreach (Finger finger in otherFingers)
             {
                 FingerSliceTrail trail = null;
@@ -73,6 +115,9 @@ namespace bbiwarg.Recognition.HandRecognition
             Mask = Mask.Dilate(1);
         }
 
+        /// <summary>
+        /// Finds the hands centroid (center of gravity).
+        /// </summary>
         public void findCentroid()
         {
             MCvPoint2D64f gravityCenter = Mask.GetMoments(true).GravityCenter;

+ 71 - 2
bbiwarg/Recognition/HandRecognition/HandDetector.cs

@@ -16,14 +16,40 @@ using Emgu.CV.Structure;
 
 namespace bbiwarg.Recognition.HandRecognition
 {
+    /// <summary>
+    /// Finds Hands by iterating over all fingers and flooding them. Each filled region is considered to be one hand and each finger belongs to one hand. To improve the hand contours, the hand masks are filled with the defects caused by overlapping fingers.
+    /// </summary>
     class HandDetector
     {
+        /// <summary>
+        /// the depth image of the current frame
+        /// </summary>
         private DepthImage depthImage;
+
+        /// <summary>
+        /// the modified depth image of the current frame (hand flooding changes depth image)
+        /// </summary>
         private Image<Gray, byte> modifiedHandDepthImage;
+
+        /// <summary>
+        /// the fingers in the current frame
+        /// </summary>
         private List<Finger> fingers;
+
+        /// <summary>
+        /// a mapping of hands and list of fingers, that don't belong to that specific hand
+        /// </summary>
         private Dictionary<Hand, List<Finger>> otherHandsFingers;
+
+        /// <summary>
+        /// the hands in the current frame
+        /// </summary>
         private List<Hand> hands;
 
+        /// <summary>
+        /// Detects Hands in the current frame and stores found hands in frameData.detectedHands
+        /// </summary>
+        /// <param name="frameData">the current frame</param>
         public void detectHands(FrameData frameData)
         {
             depthImage = frameData.DepthImage;
@@ -37,6 +63,9 @@ namespace bbiwarg.Recognition.HandRecognition
             frameData.DetectedHands = hands;
         }
 
+        /// <summary>
+        /// Creates the modified hand image. The image is a copy of the original depth image with a contour around each finger (to prevent floodfill from filling through fingers).
+        /// </summary>
         private void createModifiedHandEdgeImage()
         {
             modifiedHandDepthImage = depthImage.Image.Copy();
@@ -49,6 +78,9 @@ namespace bbiwarg.Recognition.HandRecognition
             }
         }
 
+        /// <summary>
+        /// Finds hands by flood filling from each finger (mask). All unassigned fingers that lie within the hand are assigned to the hand, all other fingers are mapped as fingers that don't belong to that specific hand.
+        /// </summary>
         private void findHands()
         {
             hands = new List<Hand>();
@@ -60,6 +92,13 @@ namespace bbiwarg.Recognition.HandRecognition
                 if(!assignedFingers.Contains(finger)) {
                     Image<Gray, byte> handMask = getHandMask(finger.HandPoint);
 
+                    int numPixels = handMask.CountNonzero()[0];
+                    if (numPixels > Parameters.HandMaxSize * depthImage.Size.NumPixels)
+                    {
+                        assignedFingers.Add(finger);
+                        break;
+                    }
+
                     List<Finger> fingersOnHand = new List<Finger>();
                     List<Finger> fingersOnOtherHand = new List<Finger>();
 
@@ -80,20 +119,32 @@ namespace bbiwarg.Recognition.HandRecognition
             }
         }
 
+        /// <summary>
+        /// Flood fills from a given point and returns the filled area as mask.
+        /// </summary>
+        /// <param name="p">flood fill starting point</param>
+        /// <returns>the filled area as mask</returns>
         private Image<Gray, byte> getHandMask(Vector2D p)
         {
             Image<Gray, byte> mask = new Image<Gray, byte>(depthImage.Size.Width + 2, depthImage.Size.Height + 2);
             MCvConnectedComp comp = new MCvConnectedComp();
-            CvInvoke.cvFloodFill(modifiedHandDepthImage, p, new MCvScalar(255), new MCvScalar(Parameters.HandFloodFillDownDiff), new MCvScalar(Parameters.HandFloodFillUpDiff), out comp, Emgu.CV.CvEnum.CONNECTIVITY.FOUR_CONNECTED, Emgu.CV.CvEnum.FLOODFILL_FLAG.DEFAULT, mask);
+            CvInvoke.cvFloodFill(modifiedHandDepthImage, p, new MCvScalar(255), new MCvScalar(Parameters.HandFloodFillDownDiff), 
+                new MCvScalar(Parameters.HandFloodFillUpDiff), out comp, Emgu.CV.CvEnum.CONNECTIVITY.FOUR_CONNECTED, Emgu.CV.CvEnum.FLOODFILL_FLAG.DEFAULT, mask);
             return mask.Copy(new Rectangle(1, 1, depthImage.Size.Width, depthImage.Size.Height));
         }
 
+        /// <summary>
+        /// Fixes overlapping fingers by merging two hands if they are separated by a finger and/or fills holes caused by overlapping fingers.
+        /// </summary>
         private void fixOverlappingFingers()
         {
             extendOrMergeThroughOverlappingFingers();
             fillOverlappingFingers();
         }
 
+        /// <summary>
+        /// Merges two hands if they are separated by an overlapping finger or extends the hand mask through an overlapping finger.
+        /// </summary>
         private void extendOrMergeThroughOverlappingFingers()
         {
             List<Hand> mergedHands = new List<Hand>();
@@ -156,14 +207,26 @@ namespace bbiwarg.Recognition.HandRecognition
                 hands.Remove(mergedHand);
         }
 
+        /// <summary>
+        /// Merges two hands together and updates the list of other hands fingers.
+        /// </summary>
+        /// <param name="hand">the first hand (other hand will be merged to this one)</param>
+        /// <param name="mergeHand">the second hand (this hand will be dropped afterwards)</param>
         private void mergeToHand(Hand hand, Hand mergeHand)
         {
             hand.mergeWith(mergeHand);
 
             foreach (Finger finger in mergeHand.Fingers)
                 otherHandsFingers[hand].Remove(finger);
+
+            otherHandsFingers.Remove(mergeHand);
         }
 
+        /// <summary>
+        /// Extends the hand mask of a given hand by flood filling starting from the given point.
+        /// </summary>
+        /// <param name="hand">the hand that should be extended</param>
+        /// <param name="p">the flood fill starting point</param>
         private void extendToHand(Hand hand, Vector2D p)
         {
             Image<Gray, byte> extendMask = getHandMask(p);
@@ -173,14 +236,20 @@ namespace bbiwarg.Recognition.HandRecognition
                 hand.extendMask(extendMask);
         }
 
+        /// <summary>
+        /// Fills holes caused by overlapping fingers.
+        /// </summary>
         private void fillOverlappingFingers()
         {
             foreach (Hand hand in hands)
             {
-                hand.fillOverlappingFingers(otherHandsFingers[hand], depthImage.Size);
+                hand.fillOverlappingFingers(otherHandsFingers[hand]);
             }
         }
 
+        /// <summary>
+        /// Finds the hands centroids.
+        /// </summary>
         private void findCentroids() {
             foreach (Hand hand in hands)
                 hand.findCentroid();

+ 22 - 0
bbiwarg/Recognition/HandRecognition/HandTracker.cs

@@ -9,24 +9,46 @@ using bbiwarg.Utility;
 
 namespace bbiwarg.Recognition.HandRecognition
 {
+    /// <summary>
+    /// Keeps track of hands over a period of time.
+    /// </summary>
     class HandTracker : Tracker<Hand, TrackedHand>
     {
+        /// <summary>
+        /// Initializes a new instance of the HandTracker class.
+        /// </summary>
+        /// <param name="imageSize">Size of the input image.</param>
         public HandTracker(ImageSize imageSize)
             : base(imageSize)
         {
         }
 
+        /// <summary>
+        /// Updates the TrackedHands with the detected hands in the current frame and stores the results in frameData.trackedHands.
+        /// </summary>
+        /// <param name="frameData">the current frame</param>
         public void trackHands(FrameData frameData)
         {
             trackObjects(frameData.DetectedHands);
             frameData.TrackedHands = getCurrentObjectsWithState(TrackingState.Tracked);
         }
 
+        /// <summary>
+        /// Calculates the similarity [0-1] of a tracked Hand and a detected Hand.
+        /// </summary>
+        /// <param name="trackedHand">the tracked hand</param>
+        /// <param name="detectedHand">the detected hand</param>
+        /// <returns>the similarity</returns>
         public override float calculateSimilarity(TrackedHand trackedHand, Hand detectedHand)
         {
             return getPositionSimilarity(trackedHand.CentroidPrediction, detectedHand.Centroid, Parameters.HandTrackerMaxCentroidRelativeMove);
         }
 
+        /// <summary>
+        /// Creates a TrackedHand.
+        /// </summary>
+        /// <param name="detectedObject">the detected hand</param>
+        /// <returns>a Trackedhand</returns>
         protected override TrackedHand createTrackedObject(Hand detectedObject)
         {
             return new TrackedHand(idPool.getNextUnusedID(), detectedObject, Parameters.HandTrackerNumFramesDetectedUntilTracked, Parameters.HandTrackerNumFramesLostUntilDeleted);

+ 23 - 0
bbiwarg/Recognition/HandRecognition/TrackedHand.cs

@@ -8,12 +8,28 @@ using bbiwarg.Utility;
 
 namespace bbiwarg.Recognition.HandRecognition
 {
+    /// <summary>
+    /// Represents a hand that is tracked for several frames
+    /// </summary>
     class TrackedHand : TrackedObject<Hand>
     {
+        /// <summary>
+        /// the kalman filter for the centroid prediction
+        /// </summary>
         private Kalman2DPositionFilter centroidKalman;
 
+        /// <summary>
+        /// predicted position of the centroid
+        /// </summary>
         public Vector2D CentroidPrediction { get { return centroidKalman.getPrediction(); } }
 
+        /// <summary>
+        /// Initializes a new instance of the TrackedHand class.
+        /// </summary>
+        /// <param name="id">The track ID.</param>
+        /// <param name="detectedHand">The detected hand.</param>
+        /// <param name="numFramesDetectedUntilTracked">The number of consecutive frames detected until the hand is considered to be tracked.</param>
+        /// <param name="numFramesLostUntilDeleted">The number of consecutive frames lost until the hand should be deleted.</param>
         public TrackedHand(int id, Hand detectedHand, int numFramesDetectedUntilTracked, int numFramesLostUntilDeleted)
             : base(id, detectedHand, numFramesDetectedUntilTracked, numFramesLostUntilDeleted)
         {
@@ -23,6 +39,10 @@ namespace bbiwarg.Recognition.HandRecognition
             logStateChange();
         }
 
+        /// <summary>
+        /// Updates the tracked hand with the given hand, logs the state change and updates the centroid prediction (kalman filter)
+        /// </summary>
+        /// <param name="detectedHand">the detected hand</param>
         public override void updateFrame(Hand detectedHand)
         {
             base.updateFrame(detectedHand);
@@ -34,6 +54,9 @@ namespace bbiwarg.Recognition.HandRecognition
                 centroidKalman.getCorrectedPosition(detectedHand.Centroid);
         }
 
+        /// <summary>
+        /// logs the state change
+        /// </summary>
         private void logStateChange()
         {
             String stateAsString = CurrentState.ToString().ToLower();

+ 60 - 0
bbiwarg/Recognition/PalmRecognition/Palm.cs

@@ -9,6 +9,9 @@ using bbiwarg.Recognition.Tracking;
 
 namespace bbiwarg.Recognition.PalmRecognition
 {
+    /// <summary>
+    /// The handedness of the palm.
+    /// </summary>
     public enum HandSide
     {
         Undefined = 0,
@@ -16,17 +19,61 @@ namespace bbiwarg.Recognition.PalmRecognition
         Left = 2
     }
 
+    /// <summary>
+    /// Represents a palm (each hand with one finger (thumb) has a palm)
+    /// </summary>
     public class Palm : TrackableObject
     {
+        /// <summary>
+        /// the hand belonging to this palm
+        /// </summary>
         public Hand Hand { get; private set; }
+
+        /// <summary>
+        /// the thumb's convexity defect
+        /// </summary>
         public ConvexityDefect ThumbDefect { get; private set; }
+
+        /// <summary>
+        /// the handedness
+        /// </summary>
         public HandSide HandSide { get; private set; }
+
+        /// <summary>
+        /// the position of the upper wrist (top left corner for left hand, top right for right hand)
+        /// </summary>
         public Vector2D WristUpper { get; private set; }
+
+        /// <summary>
+        /// the position of the upper fingers (top right for left hand, top left for right hand)
+        /// </summary>
         public Vector2D FingersUpper { get; private set; }
+
+        /// <summary>
+        /// the position of the fingers lower (bottom right for left hand, bottom left for right hand)
+        /// </summary>
         public Vector2D FingersLower { get; private set; }
+
+        /// <summary>
+        /// the position of the wrist lower (bottom left for left hand, bottom right for right hand)
+        /// </summary>
         public Vector2D WristLower { get; private set; }
+
+        /// <summary>
+        /// the quadrangle of the four palm points
+        /// </summary>
         public Quadrangle Quad { get; private set; }
 
+        /// <summary>
+        /// Initializes a new instance of the Palm class.
+        /// </summary>
+        /// <param name="hand">The hand.</param>
+        /// <param name="thumbDefect">The thumb defect.</param>
+        /// <param name="handSide">The handedness.</param>
+        /// <param name="wristUpper">The wrist upper position.</param>
+        /// <param name="fingersUpper">The fingers upper positition.</param>
+        /// <param name="fingersLower">The fingers lower position.</param>
+        /// <param name="wristLower">The wrist lower position.</param>
         public Palm(Hand hand, ConvexityDefect thumbDefect, HandSide handSide, Vector2D wristUpper, Vector2D fingersUpper, Vector2D fingersLower, Vector2D wristLower)
         {
             Hand = hand;
@@ -42,6 +89,11 @@ namespace bbiwarg.Recognition.PalmRecognition
             createQuad();
         }
 
+        /// <summary>
+        /// Gets the relative position [0-1;0-1] from an absolute position.
+        /// </summary>
+        /// <param name="absolutePosition">the absolute position</param>
+        /// <returns>the relative position</returns>
         public Vector2D getRelativePosition(Vector2D absolutePosition)
         {
             Vector2D relativePosition = Quad.getRelativePosition(absolutePosition);
@@ -51,11 +103,19 @@ namespace bbiwarg.Recognition.PalmRecognition
             return new Vector2D(x, y);
         }
 
+        /// <summary>
+        /// Checks if the position is inside the palm (with a tolerance)
+        /// </summary>
+        /// <param name="position">the absolute position</param>
+        /// <returns>whether the position is inside the palm</returns>
         public bool isInside(Vector2D position)
         {
             return Quad.isInside(position, Parameters.PalmInsideTolerance);
         }
 
+        /// <summary>
+        /// Creates the palm quadrangle
+        /// </summary>
         private void createQuad()
         {
             if (HandSide == HandSide.Left)

+ 65 - 2
bbiwarg/Recognition/PalmRecognition/PalmDetector.cs

@@ -14,12 +14,30 @@ using Emgu.CV.Structure;
 
 namespace bbiwarg.Recognition.PalmRecognition
 {
+    /// <summary>
+    /// Detects palms by iterating over each hand, if the hand has exactly one finger (possible thumb) it calculates its convexity defects and checks if there is a convexity defect that matches the requirements for a thumb defect. If a thumb defect is found, the four palm points are generated and a new palm is created.
+    /// </summary>
     class PalmDetector
     {
+        /// <summary>
+        /// the depth image of the current frame
+        /// </summary>
         private DepthImage depthImage;
+
+        /// <summary>
+        /// the hands in the current frame
+        /// </summary>
         private List<Hand> hands;
+
+        /// <summary>
+        /// the palms in the current frames
+        /// </summary>
         private List<Palm> palms;
 
+        /// <summary>
+        /// Detects palms in the current frame and stores them in frameData.detectedPalms
+        /// </summary>
+        /// <param name="frameData">the current frame</param>
         public void detectPalms(FrameData frameData)
         {
             depthImage = frameData.DepthImage;
@@ -30,6 +48,9 @@ namespace bbiwarg.Recognition.PalmRecognition
             frameData.DetectedPalms = palms;
         }
 
+        /// <summary>
+        /// Find Palms by checking each hand with only one finger, if that finger is a thumb.
+        /// </summary>
         private void findPalms()
         {
             palms = new List<Palm>();
@@ -48,6 +69,11 @@ namespace bbiwarg.Recognition.PalmRecognition
             }
         }
 
+        /// <summary>
+        /// Gets the convexity Defects from a hand mask.
+        /// </summary>
+        /// <param name="hand">the hand</param>
+        /// <returns>the convexity defects</returns>
         private List<ConvexityDefect> findConvexityDefects(Hand hand)
         {
             List<ConvexityDefect> convexityDefects;
@@ -63,6 +89,12 @@ namespace bbiwarg.Recognition.PalmRecognition
             return convexityDefects;
         }
 
+        /// <summary>
+        /// Checks all convexity defects sorted by length if they match the thumb defect requirements and returns the first match or null if none match.
+        /// </summary>
+        /// <param name="thumb">the possible thumb</param>
+        /// <param name="convexityDefects">the convexity defects</param>
+        /// <returns>the thumb defect, or null if no convexity defect matches the requirements</returns>
         private ConvexityDefect findThumbDefect(Finger thumb, List<ConvexityDefect> convexityDefects)
         {
             convexityDefects.Sort((cd1, cd2) => (cd2.Depth.CompareTo(cd1.Depth)));
@@ -74,6 +106,12 @@ namespace bbiwarg.Recognition.PalmRecognition
             return null;
         }
 
+        /// <summary>
+        /// Finds the four palm points and creates a new palm
+        /// </summary>
+        /// <param name="hand">the hand of the palm</param>
+        /// <param name="thumbDefect">the convexity defect of the thumb</param>
+        /// <returns>a new palm</returns>
         private Palm createPalm(Hand hand, ConvexityDefect thumbDefect)
         {
             HandSide side = determineHandSide(thumbDefect);
@@ -83,12 +121,17 @@ namespace bbiwarg.Recognition.PalmRecognition
             float palmLength = wristUpper.getDistanceTo(fingersUpper);
             Vector2D directionWristFingers = thumbDefect.VectorLong.normalize();
             Vector2D directionUpperLower = thumbDefect.VectorLong.getOrthogonal(side == HandSide.Right).normalize();
-            Vector2D wristLower = wristUpper + palmWidth*directionUpperLower;
+            Vector2D wristLower = wristUpper + palmWidth * directionUpperLower;
             Vector2D fingersLower = wristUpper + 0.75f * palmLength * directionWristFingers + 0.75f * palmWidth * directionUpperLower;
 
             return new Palm(hand, thumbDefect, side, wristUpper, fingersUpper, fingersLower, wristLower);
         }
 
+        /// <summary>
+        /// Determines the handedness of the palm's hand
+        /// </summary>
+        /// <param name="thumbDefect">the convexity defect of the thumb</param>
+        /// <returns>the handedness of the palm's hand</returns>
         private HandSide determineHandSide(ConvexityDefect thumbDefect)
         {
             if (thumbDefect.VectorShort.crossProduct(thumbDefect.VectorLong) < 0)
@@ -97,11 +140,17 @@ namespace bbiwarg.Recognition.PalmRecognition
                 return HandSide.Left;
         }
 
+        /// <summary>
+        /// Finds the upper wrist end by walking from the defect.Inner towards the wrist direction, until it reaches the hand boundaries.
+        /// </summary>
+        /// <param name="hand">the hand</param>
+        /// <param name="thumbDefect">the convexity defect of the thumb</param>
+        /// <returns>the position of the upper wrist end</returns>
         private Vector2D findWristUpper(Hand hand, ConvexityDefect thumbDefect)
         {
             Vector2D wristDirection = thumbDefect.VectorLong.getInverse().normalize();
             Vector2D wristUpper = thumbDefect.Inner;
-            Vector2D wristUpperNext = wristUpper + 5*wristDirection;
+            Vector2D wristUpperNext = wristUpper + 5 * wristDirection;
             while (wristUpperNext.isInBound(depthImage.Size) && hand.isInside(wristUpperNext))
             {
                 wristUpper = wristUpperNext;
@@ -110,6 +159,13 @@ namespace bbiwarg.Recognition.PalmRecognition
             return wristUpper;
         }
 
+        /// <summary>
+        /// Finds the upper finger end by walking from the defect.OuterLong in the finger direction, until it reaches the end of the middle finger below.
+        /// </summary>
+        /// <param name="hand">the hand</param>
+        /// <param name="thumbDefect">the convexity defect of the thumb</param>
+        /// <param name="side">the handedness</param>
+        /// <returns>the position of the upper fingers end</returns>
         private Vector2D findFingersUpper(Hand hand, ConvexityDefect thumbDefect, HandSide side)
         {
             Vector2D fingersDirection = thumbDefect.VectorLong.normalize();
@@ -133,6 +189,13 @@ namespace bbiwarg.Recognition.PalmRecognition
             return fingersUpper;
         }
 
+        /// <summary>
+        /// Gets the palm width by checking for the maximum orthogonal length within the hand along multiple positions on the index finger (defect.inner<->defect.outerLong)
+        /// </summary>
+        /// <param name="hand">the hand</param>
+        /// <param name="thumbDefect">the convexity defect of the thumb</param>
+        /// <param name="side">the handedness</param>
+        /// <returns>the palm width</returns>
         private float findPalmWidth(Hand hand, ConvexityDefect thumbDefect, HandSide side)
         {
             Vector2D lowerDirection = thumbDefect.VectorLong.getOrthogonal(side == HandSide.Right).normalize();

+ 27 - 0
bbiwarg/Recognition/PalmRecognition/PalmTracker.cs

@@ -9,19 +9,37 @@ using bbiwarg.Utility;
 
 namespace bbiwarg.Recognition.PalmRecognition
 {
+    /// <summary>
+    /// Keeps track of palms over a period of time.
+    /// </summary>
     class PalmTracker : Tracker<Palm, TrackedPalm>
     {
+
+        /// <summary>
+        /// Initializes a new instance of the PalmTracker class.
+        /// </summary>
+        /// <param name="imageSize">Size of the input image.</param>
         public PalmTracker(ImageSize imageSize)
             : base(imageSize)
         {
         }
 
+        /// <summary>
+        /// Updates the tracked palms with the detected palms in the current frame and stores the (optimized) results in frameData.trackedPalms
+        /// </summary>
+        /// <param name="frameData">the current frame</param>
         public void trackPalms(FrameData frameData)
         {
             trackObjects(frameData.DetectedPalms);
             frameData.TrackedPalms = getOptimizedPalms();
         }
 
+        /// <summary>
+        /// Calculates the similarity [0-1] of a tracked palm and a detected palm
+        /// </summary>
+        /// <param name="trackedPalm">the tracked palm</param>
+        /// <param name="detectedPalm">the detected palm</param>
+        /// <returns>the similarity</returns>
         public override float calculateSimilarity(TrackedPalm trackedPalm, Palm detectedPalm)
         {
             float handSimilarity = (detectedPalm.Hand.TrackID == trackedPalm.LastObject.Hand.TrackID) ? 1 : 0;
@@ -33,11 +51,20 @@ namespace bbiwarg.Recognition.PalmRecognition
             return handSimilarity * wristUpperSimilarity * wristLowerSimilarity * fingersUpperSimilarity * fingersLowerSimilarity;
         }
 
+        /// <summary>
+        /// Creates a TrackedPalm
+        /// </summary>
+        /// <param name="detectedPalm">the detected palm</param>
+        /// <returns>a new TrackedPalm</returns>
         protected override TrackedPalm createTrackedObject(Palm detectedPalm)
         {
             return new TrackedPalm(idPool.getNextUnusedID(), detectedPalm, Parameters.PalmTrackerNumFramesDetectedUntilTracked, Parameters.PalmTrackerNumFramesLostUntilDeleted);
         }
 
+        /// <summary>
+        /// Gets all optimized representations of all tracked palms.
+        /// </summary>
+        /// <returns>all tracked palms</returns>
         private List<Palm> getOptimizedPalms()
         {
             List<Palm> optimizedPalms = new List<Palm>();

+ 55 - 0
bbiwarg/Recognition/PalmRecognition/TrackedPalm.cs

@@ -9,19 +9,63 @@ using bbiwarg.Utility;
 
 namespace bbiwarg.Recognition.PalmRecognition
 {
+    /// <summary>
+    /// Represents a palm that is tracked for several frames.
+    /// </summary>
     class TrackedPalm : TrackedObject<Palm>
     {
+        /// <summary>
+        /// the kalman filter for the wrist upper prediction
+        /// </summary>
         private Kalman2DPositionFilter wristUpperKalman;
+
+        /// <summary>
+        /// the kalman filter for the wrist lower prediction
+        /// </summary>
         private Kalman2DPositionFilter wristLowerKalman;
+
+        /// <summary>
+        /// the kalman filter for the fingers upper prediction
+        /// </summary>
         private Kalman2DPositionFilter fingersUpperKalman;
+
+        /// <summary>
+        /// the kalman filter for the fingers lower prediction
+        /// </summary>
         private Kalman2DPositionFilter fingersLowerKalman;
 
+        /// <summary>
+        /// the predicted position of the wrist upper
+        /// </summary>
         public Vector2D WristUpperPrediction { get { return wristUpperKalman.getPrediction(); } }
+
+        /// <summary>
+        /// the predicted position of the wrist lower
+        /// </summary>
         public Vector2D WristLowerPrediction { get { return wristLowerKalman.getPrediction(); } }
+
+        /// <summary>
+        /// the predicted position of the fingers upper
+        /// </summary>
         public Vector2D FingersUpperPrediction { get { return fingersUpperKalman.getPrediction(); } }
+
+        /// <summary>
+        /// the predicted position of the fingers lower
+        /// </summary>
         public Vector2D FingersLowerPrediction { get { return fingersLowerKalman.getPrediction(); } }
+
+        /// <summary>
+        /// the optimized palm (using predicted palm points)
+        /// </summary>
         public Palm OptimizedPalm { get; private set; }
 
+        /// <summary>
+        /// Initializes a new instance of the TrackedPalm class.
+        /// </summary>
+        /// <param name="id">The track ID.</param>
+        /// <param name="detectedPalm">The detected palm.</param>
+        /// <param name="numFramesDetectedUntilTracked">The number of consecutive frames detected until the palm is considered tracked.</param>
+        /// <param name="numFramesLostUntilDeleted">The number of consecutive frames lost until the palm is deleted.</param>
         public TrackedPalm(int id, Palm detectedPalm, int numFramesDetectedUntilTracked, int numFramesLostUntilDeleted)
             : base(id, detectedPalm, numFramesDetectedUntilTracked, numFramesLostUntilDeleted)
         {
@@ -38,6 +82,10 @@ namespace bbiwarg.Recognition.PalmRecognition
             logStateChange();
         }
 
+        /// <summary>
+        /// Updates the tracked palm with the given palm, logs the statechange, updates the palm point predictions and the optimized palm.
+        /// </summary>
+        /// <param name="detectedPalm"></param>
         public override void updateFrame(Palm detectedPalm)
         {
             base.updateFrame(detectedPalm);
@@ -56,11 +104,18 @@ namespace bbiwarg.Recognition.PalmRecognition
             }
         }
 
+        /// <summary>
+        /// Updates the optimized palm by creating a new palm with the predicted palm points
+        /// </summary>
+        /// <param name="detectedPalm"></param>
         private void updateOptimizedPalm(Palm detectedPalm)
         {
             OptimizedPalm = new Palm(detectedPalm.Hand, detectedPalm.ThumbDefect, detectedPalm.HandSide, WristUpperPrediction, FingersUpperPrediction, FingersLowerPrediction, WristLowerPrediction);
         }
 
+        /// <summary>
+        /// logs the state change
+        /// </summary>
         private void logStateChange()
         {
             String stateAsString = CurrentState.ToString().ToLower();

+ 14 - 0
bbiwarg/Utility/Logger.cs

@@ -6,6 +6,9 @@ using System.Threading.Tasks;
 
 namespace bbiwarg.Utility
 {
+    /// <summary>
+    /// flags describing the different log subjects
+    /// </summary>
     public enum LogSubject
     {
         None = 0,
@@ -22,10 +25,21 @@ namespace bbiwarg.Utility
         PalmTracker = 1024
     }
 
+    /// <summary>
+    /// Logs messages.
+    /// </summary>
     static class Logger
     {
+        /// <summary>
+        /// the current frame
+        /// </summary>
         public static int CurrentFrame { get; set; }
 
+        /// <summary>
+        /// May print a message depending on the subject and <see cref="Parameters.LoggerEnabledSubjects"/>.
+        /// </summary>
+        /// <param name="message">the message</param>
+        /// <param name="subject">the subject the message belongs to, determines if this message is printed</param>
         public static void log(string message, LogSubject subject)
         {
             if (Parameters.LoggerEnabledSubjects.HasFlag(subject))

+ 56 - 0
bbiwarg/Utility/Projection2DTo2D.cs

@@ -9,17 +9,54 @@ using Emgu.CV.Structure;
 
 namespace bbiwarg.Utility
 {
+    /// <summary>
+    /// Computes and stores a homography matrix and provides functions to export it and project points.
+    /// </summary>
     class Projection2DTo2D
     {
+        /// <summary>
+        /// size of the image the original points are in
+        /// </summary>
         private ImageSize sizeA;
+
+        /// <summary>
+        /// size of the image the projected points are in
+        /// </summary>
         private ImageSize sizeB;
+
+        /// <summary>
+        /// calibration points in the first image (corresponding to the points in calibrationPointsB)
+        /// </summary>
         private List<PointF> calibrationPointsA;
+
+
+        /// <summary>
+        /// calibration points in the second image (corresponding to the points in calibrationPointsA)
+        /// </summary>
+
+        /// <summary>
+        /// number of points used for the calibration
+        /// </summary>
         private int numPointsForCalibration;
+
+        /// <summary>
+        /// homography matrix used to compute the projected points
+        /// </summary>
         private HomographyMatrix homography;
 
+
+        /// <summary>
+        /// true iff the calibration is finished
+        /// </summary>
         public bool IsCalibrated { get; private set; }
 
+
+        /// <summary>
+        /// Constructs a Projection2DTo2D.
+        /// </summary>
+        /// <param name="sizeA">size of the image the original points are in</param>
+        /// <param name="sizeB">size of the image the projected points are in</param>
+        /// <param name="numPointsForCalibration">number of points used for the calibration</param>
         public Projection2DTo2D(ImageSize sizeA, ImageSize sizeB, int numPointsForCalibration = 4) {
             this.sizeA = sizeA;
             this.sizeB = sizeB;
@@ -28,6 +65,9 @@ namespace bbiwarg.Utility
             reset();
         }
 
+        /// <summary>
+        /// Resets the calibration.
+        /// </summary>
         public void reset() {
             homography = null;
             IsCalibrated = false;
@@ -35,6 +75,11 @@ namespace bbiwarg.Utility
             calibrationPointsB = new List<PointF>();
         }
 
+        /// <summary>
+        /// Adds a pair of calibration points.
+        /// </summary>
+        /// <param name="pointA">point in the first image</param>
+        /// <param name="pointB">point in the second image</param>
         public void addCalibrationPoints(Vector2D pointA, Vector2D pointB) {
             calibrationPointsA.Add(sizeA.getRelativePoint(pointA));
             calibrationPointsB.Add(sizeB.getRelativePoint(pointB));
@@ -43,12 +88,20 @@ namespace bbiwarg.Utility
                 calibrate();
         }
 
+        /// <summary>
+        /// Projects a point from the first image into the second image using the homography.
+        /// </summary>
+        /// <param name="pointA">the point in the first image to project</param>
+        /// <returns>the projected point in the second image</returns>
         public Vector2D projectPoint(Vector2D pointA) {
             PointF[] pointfsB = new PointF[1] {sizeA.getRelativePoint(pointA)};
             homography.ProjectPoints(pointfsB);
             return sizeB.getAbsolutePoint(new Vector2D(pointfsB[0]));
         }
 
+        /// <summary>
+        /// Computes the homography from the lists of calibration points.
+        /// </summary>
         private void calibrate() {
             homography = CameraCalibration.FindHomography(calibrationPointsA.ToArray(), calibrationPointsB.ToArray(), Emgu.CV.CvEnum.HOMOGRAPHY_METHOD.DEFAULT, 0.995);
             
@@ -59,6 +112,9 @@ namespace bbiwarg.Utility
             exportHomography();
         }
 
+        /// <summary>
+        /// Writes the homography to a file.
+        /// </summary>
         private void exportHomography() {
             String[] fileData = new String[homography.Size.Height];
             StringBuilder sb = new StringBuilder();

+ 48 - 1
bbiwarg/Utility/Timer.cs

@@ -7,17 +7,56 @@ using System.Threading;
 
 namespace bbiwarg.Utility
 {
-    class Timer
+    /// <summary>
+    /// Stores and prints timing information for different code sections.
+    /// </summary>
+    static class Timer
     {
+        /// <summary>
+        /// used to prevent running <see cref="start"/>, <see cref="stop"/> and <see cref="outputAll"/> simultaneously from different threads
+        /// </summary>
         private static Object sync = new object();
+
+        /// <summary>
+        /// dictionary of stopwatches indexed by name of the code section
+        /// </summary>
         private static Dictionary<String, Stopwatch> stopwatches = new Dictionary<string, Stopwatch>();
+
+        /// <summary>
+        /// dictionary of current runtimes indexed by name of the code section
+        /// </summary>
         private static Dictionary<String, double> currentTimes = new Dictionary<string, double>();
+
+        /// <summary>
+        /// dictionary of minimum runtimes indexed by name of the code section
+        /// </summary>
         private static Dictionary<String, double> minTimes = new Dictionary<string, double>();
+
+        /// <summary>
+        /// dictionary of maximum runtimes indexed by name of the code section
+        /// </summary>
         private static Dictionary<String, double> maxTimes = new Dictionary<string, double>();
+
+        /// <summary>
+        /// dictionary of the sum of runtimes indexed by name of the code section
+        /// </summary>
         private static Dictionary<String, double> sumTimes = new Dictionary<string, double>();
+
+        /// <summary>
+        /// dictionary of the number of measurements taken, indexed by name of the code section
+        /// </summary>
+
+        /// <summary>
+        /// the maximum length for the name of a code section
+        /// </summary>
         private static int maxNameLength = 1;
 
+
+        /// <summary>
+        /// Starts a timer for the given name and initializes the dictionaries when called for the first time with this name.
+        /// </summary>
+        /// <param name="name">name of the code section</param>
         public static void start(String name)
         {
             lock (sync)
@@ -36,6 +75,10 @@ namespace bbiwarg.Utility
             }
         }
 
+        /// <summary>
+        /// Stops the timer for the given name and stores timing information.
+        /// </summary>
+        /// <param name="name">name of the code section</param>
         public static void stop(String name)
         {
             lock (sync)
@@ -49,6 +92,10 @@ namespace bbiwarg.Utility
                 currentTimes[name] = time;
             }
         }
+
+        /// <summary>
+        /// Prints all collected timing information.
+        /// </summary>
         public static void outputAll()
         {
             lock (sync)