I have been trying to align the depth stream with the RGB stream from my Kinect for quite some time. I have read several articles about this, but I must be missing a key point, because I can't get it to work.
Here is an image of what I managed to do; I circled some of the easily distinguishable misalignments.
I tried to keep the code as small as possible, but it is still a fair amount, so please bear with me. The snippet at the end of this question is the handler the Kinect SDK calls whenever both a depth frame and an RGB frame are ready.
As you can see, I experimented with
ColorImagePoint colorpoint = _Sensor.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, depthpoint, ColorImageFormat.RgbResolution640x480Fps30);
I would prefer to use CoordinateMapper.MapDepthFrameToColorFrame, since it maps the whole frame in one call and should be able to solve the problem, but I can't get it to work. I am probably not using it correctly.
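For reference, this is roughly how I understand MapDepthFrameToColorFrame is meant to be called. It is only a sketch of my attempt, assumed to run inside the same AllFramesReady handler shown below (so depthFrame and _Sensor already exist); the buffer size and the indexing of the result array are my assumptions:

// Sketch of my MapDepthFrameToColorFrame attempt (assumption: this runs inside the
// AllFramesReady handler below, where depthFrame and _Sensor are already available).
DepthImagePixel[] depthData = new DepthImagePixel[depthFrame.PixelDataLength];
depthFrame.CopyDepthImagePixelDataTo(depthData);

// One mapped color coordinate for every depth pixel.
ColorImagePoint[] colorCoordinates = new ColorImagePoint[depthFrame.PixelDataLength];

_Sensor.CoordinateMapper.MapDepthFrameToColorFrame(
    DepthImageFormat.Resolution640x480Fps30,
    depthData,
    ColorImageFormat.RgbResolution640x480Fps30,
    colorCoordinates);

// As far as I understand, colorCoordinates[y * depthFrame.Width + x] should now hold
// the RGB pixel position that corresponds to the depth pixel at (x, y).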
I am using Microsoft's Kinect SDK 1.6.
private void EventAllFramesReady(Object Sender, AllFramesReadyEventArgs e)
{
    System.Drawing.Color color;

    Bitmap image = null;
    Bitmap depth = null;

    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
    {
        using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
        {
            // color
            image = new Bitmap(colorFrame.Width, colorFrame.Height);

            byte[] colorPixels = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(colorPixels);

            //lock bitmap, and work with BitmapData (way faster than SetPixel())
            BitmapData imageBitmapData = image.LockBits(new Rectangle(0, 0, image.Width, image.Height),
                                                        ImageLockMode.WriteOnly,
                                                        image.PixelFormat);
            IntPtr IptrImage = imageBitmapData.Scan0;
            byte[] PixelsImage = new byte[image.Width * image.Height * 4];

            // depth
            depth = new Bitmap(depthFrame.Width, depthFrame.Height);

            DepthImagePixel[] depthData = new DepthImagePixel[depthFrame.PixelDataLength];
            depthFrame.CopyDepthImagePixelDataTo(depthData);

            //lock bitmap, and work with BitmapData (way faster than SetPixel())
            BitmapData depthBitmapData = depth.LockBits(new Rectangle(0, 0, depth.Width, depth.Height),
                                                        ImageLockMode.WriteOnly,
                                                        depth.PixelFormat);
            IntPtr IptrDepth = depthBitmapData.Scan0;
            byte[] PixelsDepth = new byte[depth.Width * depth.Height * 4];

            DepthImagePoint depthpoint = new DepthImagePoint();

            for (int x = 1; x < colorFrame.Width; x++)
            {
                for (int y = 1; y < colorFrame.Height; y++)
                {
                    int i = ((y * image.Width) + x) * 4;

                    short depthdistanceRAW = (depthData[x + y * depth.Width]).Depth;

                    // convert distance value into a color
                    color = System.Drawing.Color.Pink;
                    if (depthdistanceRAW > 0 && depthdistanceRAW <= 4000)
                    {
                        int depthdistance = (int)((depthdistanceRAW / 4090f) * 255f);
                        color = System.Drawing.Color.FromArgb((int)(depthdistance / 2f), depthdistance, (int)(depthdistance * 0.7f));
                    }

                    depthpoint.X = x;
                    depthpoint.Y = y;
                    depthpoint.Depth = depthdistanceRAW;

                    ColorImagePoint colorpoint = _Sensor.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30,
                                                                                                    depthpoint,
                                                                                                    ColorImageFormat.RgbResolution640x480Fps30);

                    //if (colorpoint.X > 0 && colorpoint.X <= 640 && colorpoint.Y > 0 && colorpoint.Y <= 480)
                    //{
                        int adjustedposition = ((colorpoint.Y * image.Width) + colorpoint.X) * 4;
                        //if (adjustedposition < depthData.Length)
                        //{
                            PixelsDepth[i] = color.B;
                            PixelsDepth[i + 1] = color.G;
                            PixelsDepth[i + 2] = color.R;
                            PixelsDepth[i + 3] = DepthTransparency;
                        //}
                    //}

                    PixelsImage[i] = colorPixels[i];
                    PixelsImage[i + 1] = colorPixels[i + 1];
                    PixelsImage[i + 2] = colorPixels[i + 2];
                    PixelsImage[i + 3] = 255;
                }
            }

            Marshal.Copy(PixelsImage, 0, IptrImage, PixelsImage.Length);
            image.UnlockBits(imageBitmapData);

            Marshal.Copy(PixelsDepth, 0, IptrDepth, PixelsDepth.Length);
            depth.UnlockBits(depthBitmapData);
        }
    }

    _kf.UpdateImage(image); // update the RGB picture in the form
    _kf.UpdateDepth(depth); // update the Depth picture in the form
}