// AiQ_GUI/Camera/ImageProcessing.cs
// Snapshot capture, downscaling and iris-brightness comparison helpers for camera testing.
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using System.Drawing.Imaging;
using System.Net;
using Image = System.Drawing.Image;
namespace AiQ_GUI
{
internal class ImageProcessing
{
    /// <summary>
    /// Requests a snapshot from the camera's HTTP API, downscales it to 480x270
    /// (25% of 1080p) and re-compresses it as 75%-quality JPEG to keep the stored
    /// file small. Optionally displays the result in a PictureBox and saves it to disk.
    /// </summary>
    /// <param name="suffix">API endpoint suffix, e.g. "Colour-snapshot" or "Infrared-snapshot".</param>
    /// <param name="IPAddress">Camera IP address as a dotted string.</param>
    /// <param name="DevPass">Password for the camera's "developer" HTTP account.</param>
    /// <param name="savePath">Destination file path; only used when <paramref name="SaveDisplay"/> is true.</param>
    /// <param name="PcBx">Picture box to display the image in; only used when <paramref name="SaveDisplay"/> is true.</param>
    /// <param name="SaveDisplay">Enables both the display and the save-to-disk side effects.</param>
    /// <returns>The processed image, or null on any failure (failures are logged to the actions list).</returns>
    public static async Task<Image?> GetProcessedImage(string suffix, string IPAddress, string DevPass, string? savePath = null, PictureBox? PcBx = null, bool SaveDisplay = false)
    {
        try
        {
            string requestUrl = $"http://{IPAddress}/{suffix}";
            // NOTE(review): a new HttpClient per call is tolerable at this call rate, but a
            // cached client per camera would avoid socket churn if usage grows.
            HttpClientHandler handler = new()
            {
                Credentials = new NetworkCredential("developer", DevPass),
                PreAuthenticate = true
            };
            using HttpClient httpClient = new(handler);
            HttpResponseMessage response = await httpClient.GetAsync(requestUrl);
            if (!response.IsSuccessStatusCode)
            {
                MainForm.Instance.AddToActionsList($"No success from {requestUrl} replied {response.StatusCode}");
                return null;
            }
            byte[] imageBytes = await response.Content.ReadAsByteArrayAsync();
            if (imageBytes.Length == 0) // Camera replied OK but sent no payload
            {
                MainForm.Instance.AddToActionsList($"No image data received from {requestUrl}");
                return null;
            }
            // Decode with Emgu CV. Mats wrap unmanaged memory, so dispose them deterministically
            // rather than waiting for finalizers (the original leaked one Mat per snapshot).
            using Mat mat = new();
            CvInvoke.Imdecode(imageBytes, ImreadModes.AnyColor, mat);
            if (mat.IsEmpty)
            {
                MainForm.Instance.AddToActionsList("Failed to decode image with Emgu CV.");
                return null;
            }
            // Downscale image to 25% resolution of 1080p.
            using Mat downscaledMat = new();
            CvInvoke.Resize(mat, downscaledMat, new Size(480, 270));
            // Compress to JPEG at 75% quality.
            byte[] jpegBytes;
            using (var bgrImage = downscaledMat.ToImage<Bgr, byte>())
                jpegBytes = bgrImage.ToJpegData(75);
            // GDI+ requires the stream passed to Image.FromStream to remain open for the
            // image's entire lifetime; the original disposed it at method exit, which can
            // make the returned image throw later. Clone into a standalone Bitmap so the
            // stream can be released here safely.
            Image IMG;
            using (MemoryStream ms = new(jpegBytes))
            using (Image streamBound = Image.FromStream(ms))
                IMG = new Bitmap(streamBound);
            // Display a private clone so the PictureBox owns its image independently
            // of the returned one.
            if (SaveDisplay && PcBx != null)
                PcBx.Image = (Image)IMG.Clone();
            // Save image to disk.
            if (SaveDisplay && !string.IsNullOrEmpty(savePath))
                IMG.Save(savePath, ImageFormat.Jpeg);
            return IMG;
        }
        catch (HttpRequestException ex)
        {
            MainForm.Instance.AddToActionsList($"HTTP error: {ex.Message}");
            return null;
        }
        catch (Exception ex)
        {
            MainForm.Instance.AddToActionsList($"Error processing image: {ex.Message}");
            return null;
        }
    }

    /// <summary>
    /// Takes a colour snapshot plus two IR snapshots at wide (F2.0) and tight (F16.0)
    /// iris settings, then verifies that the wide-iris image is measurably brighter
    /// than the tight-iris one. Results are written to the supplied labels; all
    /// failures are logged to the actions list.
    /// </summary>
    public static async Task ImageCheck(PictureBox PicBxOV, PictureBox PicBxF2, PictureBox PicBxF16, Label LblF2, Label LblF16, Camera CamOnTest)
    {
        // Start the colour (overview) snapshot; it runs in parallel with the iris
        // changes and IR captures below and is awaited near the end.
        Task<Image?> Colour_Response = GetProcessedImage("Colour-snapshot", CamOnTest.IP, CamOnTest.DevPass, LDS.MAVPath + LDS.OVsavePath, PicBxOV, true);
        // Change to wide iris F2.0 (VISCA direct iris position command).
        await FlexiAPI.APIHTTPVISCA(CamOnTest.IP, "8101044B00000100FF", true);
        await Task.Delay(200); // Wait for iris to settle before taking IR image
        // Take IR bright-light image.
        Image? F2_Response = await GetProcessedImage("Infrared-snapshot", CamOnTest.IP, CamOnTest.DevPass, LDS.MAVPath + LDS.IROpensavePath, PicBxF2, true);
        if (F2_Response == null)
        {
            MainForm.Instance.AddToActionsList("IR F2.0 image response is blank.");
            await ObserveColourSnapshot(Colour_Response); // don't leave the in-flight task's fault unobserved
            return;
        }
        // Change to tight iris F16.0.
        await FlexiAPI.APIHTTPVISCA(CamOnTest.IP, "8101044B00000004FF", true);
        await Task.Delay(200); // Wait for iris to settle before taking IR image
        // Take IR low-light image.
        Image? F16_Response = await GetProcessedImage("Infrared-snapshot", CamOnTest.IP, CamOnTest.DevPass, LDS.MAVPath + LDS.IRTightsavePath, PicBxF16, true);
        if (F16_Response == null)
        {
            MainForm.Instance.AddToActionsList("IR F16.0 image response is blank.");
            await ObserveColourSnapshot(Colour_Response); // don't leave the in-flight task's fault unobserved
            return;
        }
        try
        {
            if (await Colour_Response == null)
            {
                MainForm.Instance.AddToActionsList("Colour image response is blank.");
                return;
            }
        }
        catch (Exception ex)
        {
            MainForm.Instance.AddToActionsList($"Error awaiting Colour snapshot: {ex.Message}");
            return;
        }
        // Brightness test between min and max iris.
        try
        {
            double luminanceF2 = GetMeanLuminance(F2_Response);
            double luminanceF16 = GetMeanLuminance(F16_Response);
            // NOTE(review): += accumulates text across repeated runs — confirm the
            // labels are reset elsewhere before each test.
            LblF2.Text += luminanceF2 + "%";
            LblF16.Text += luminanceF16 + "%";
            // The wide iris must be brighter than the tight iris by more than 1%.
            if (luminanceF2 < luminanceF16 * 1.01)
            {
                MainForm.Instance.AddToActionsList("Insufficient luminance contrast between min and max iris");
                LblF2.ForeColor = LblF16.ForeColor = Color.Red;
            }
            else
                LblF2.ForeColor = LblF16.ForeColor = Color.LightGreen;
        }
        catch (Exception ex)
        {
            MainForm.Instance.AddToActionsList($"Error calculating luminance: {ex.Message}");
            return;
        }
    }

    // Awaits an already-started snapshot task purely to observe any fault before
    // ImageCheck returns early; without this the exception would go unobserved.
    private static async Task ObserveColourSnapshot(Task<Image?> snapshot)
    {
        try
        {
            await snapshot;
        }
        catch (Exception ex)
        {
            MainForm.Instance.AddToActionsList($"Error awaiting Colour snapshot: {ex.Message}");
        }
    }

    /// <summary>
    /// Returns the mean luminance of an image as a percentage (0-100) of full-scale
    /// 8-bit grey, rounded to 4 decimal places.
    /// </summary>
    /// <param name="Img">Colour image to measure; must decode to a BGR mat.</param>
    public static double GetMeanLuminance(Image Img)
    {
        using Bitmap bmp = new(Img); // Convert from Image to Bitmap
        using MemoryStream ms = new();
        // Encode losslessly (PNG) for the Emgu round-trip: the original used JPEG,
        // which injects compression artefacts into the brightness measurement.
        bmp.Save(ms, ImageFormat.Png);
        byte[] encodedBytes = ms.ToArray();
        // Dispose the mats deterministically — they wrap unmanaged memory.
        using Mat mat = new();
        CvInvoke.Imdecode(encodedBytes, ImreadModes.AnyColor, mat); // Convert to mat
        using Mat grayMat = new();
        CvInvoke.CvtColor(mat, grayMat, ColorConversion.Bgr2Gray); // Convert to grayscale
        MCvScalar mean = CvInvoke.Mean(grayMat); // Mean over all pixels
        // Translate the 0-255 8-bit grey mean into a percentage.
        return Math.Round((mean.V0 / 255) * 100, 4); // V0 holds the single-channel mean
    }
}
}