feat: replace generated camera data with actual data read from the camera

2025-07-03 15:47:00 +08:00
parent bed0158a5f
commit 178ac0de67
4 changed files with 583 additions and 80 deletions


@@ -3,8 +3,10 @@ using System.Text;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats.Jpeg;
using SixLabors.ImageSharp.PixelFormats;
using Peripherals.CameraClient; // reference the camera client
namespace server.Services;
/// <summary>
/// HTTP video stream service that fetches image data from the FPGA and pushes it to the front-end web page
/// Simplified implementation that establishes the basic framework first
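The service's main loop is not part of this diff. As a rough sketch of how the acquire → encode → broadcast pipeline described above could sit inside a BackgroundService (the loop shape, the BroadcastFrame name, and the ~30 fps pacing are assumptions; GetFPGAImageData and ConvertToJpeg are the methods changed below):

protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    while (!stoppingToken.IsCancellationRequested)
    {
        var rgb24 = await GetFPGAImageData();          // camera read + RGB565 -> RGB24
        if (rgb24.Length == 0)
        {
            await Task.Delay(100, stoppingToken);      // back off briefly on read failure
            continue;
        }
        var jpeg = await ConvertToJpeg(rgb24);          // RGB24 -> JPEG (quality 80)
        if (jpeg.Length > 0)
            await BroadcastFrame(jpeg, stoppingToken);  // hypothetical name for the MJPEG send step
        _frameCounter++;
        await Task.Delay(33, stoppingToken);            // ~30 fps pacing (assumed)
    }
}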
@@ -18,6 +20,11 @@ public class HttpVideoStreamService : BackgroundService
private readonly int _frameWidth = 640;
private readonly int _frameHeight = 480;
// Camera client
private Camera? _camera;
private readonly string _cameraAddress = "192.168.1.100"; // configure to match the actual FPGA address
private readonly int _cameraPort = 8888; // configure to match the actual port
// Simulated FPGA image data
private int _frameCounter = 0;
private readonly List<HttpListenerResponse> _activeClients = new List<HttpListenerResponse>();
@@ -62,6 +69,8 @@ public class HttpVideoStreamService : BackgroundService
/// </summary>
public HttpVideoStreamService()
{
// Initialize the camera client
_camera = new Camera(_cameraAddress, _cameraPort);
}
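The Peripherals.CameraClient.Camera type is not included in this diff; from how it is used below, its surface roughly matches the sketch that follows. The result-wrapper type name FrameResult is an assumption; the constructor parameters and the members IsSuccessful, Value and Error come from the calls in this file.

// Assumed shape, inferred from usage in GetFPGAImageData; not the actual implementation.
public class FrameResult
{
    public bool IsSuccessful { get; init; }
    public byte[] Value { get; init; } = Array.Empty<byte>();
    public string? Error { get; init; }
}

public class Camera
{
    public Camera(string address, int port) { /* remember the FPGA camera endpoint */ }

    // Reads one RGB565 frame from the device over the network.
    public Task<FrameResult> ReadFrame() => throw new NotImplementedException();
}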
/// <summary>
@@ -353,57 +362,70 @@ public class HttpVideoStreamService : BackgroundService
}
/// <summary>
/// Function that simulates fetching image data from the FPGA
/// In a real implementation this should read a specific FPGA address range over the UDP connection
/// Fetch image data from the FPGA
/// Reads RGB565 data from the camera and converts it to RGB24
/// </summary>
private async Task<byte[]> GetFPGAImageData()
{
// Simulate an asynchronous FPGA data read
await Task.Delay(1);
// Simplified simulated image-data generation
var random = new Random(_frameCounter);
var imageData = new byte[_frameWidth * _frameHeight * 3]; // RGB24 format
// Generate a simple colored-noise pattern
for (int i = 0; i < imageData.Length; i += 3)
if (_camera == null)
{
// Derive the color from the frame counter and pixel position
var baseColor = (_frameCounter + i / 3) % 256;
imageData[i] = (byte)((baseColor + random.Next(0, 50)) % 256); // R
imageData[i + 1] = (byte)((baseColor * 2 + random.Next(0, 50)) % 256); // G
imageData[i + 2] = (byte)((baseColor * 3 + random.Next(0, 50)) % 256); // B
logger.Error("Camera client is not initialized");
return new byte[0];
}
if (_frameCounter % 30 == 0) // log once per second
try
{
logger.Debug("生成第 {FrameNumber} 帧", _frameCounter);
}
// Read frame data from the camera
var result = await _camera.ReadFrame();
if (!result.IsSuccessful)
{
logger.Error("Failed to read camera frame data: {Error}", result.Error);
return new byte[0];
}
return imageData;
var rgb565Data = result.Value;
// Verify that the data length is correct
if (!Common.Image.ValidateImageDataLength(rgb565Data, _frameWidth, _frameHeight, 2))
{
logger.Warn("Camera data length mismatch, expected: {Expected}, actual: {Actual}",
_frameWidth * _frameHeight * 2, rgb565Data.Length);
}
// Convert RGB565 to RGB24
var rgb24Result = Common.Image.ConvertRGB565ToRGB24(rgb565Data, _frameWidth, _frameHeight);
if (!rgb24Result.IsSuccessful)
{
logger.Error("RGB565转RGB24失败: {Error}", rgb24Result.Error);
return new byte[0];
}
if (_frameCounter % 30 == 0) // log once per second
{
logger.Debug("Captured frame {FrameNumber}, RGB565 size: {RGB565Size} bytes, RGB24 size: {RGB24Size} bytes",
_frameCounter, rgb565Data.Length, rgb24Result.Value.Length);
}
return rgb24Result.Value;
}
catch (Exception ex)
{
logger.Error(ex, "Error while fetching FPGA image data");
return new byte[0];
}
}
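Common.Image.ConvertRGB565ToRGB24 and ValidateImageDataLength are not shown in this diff. As a reference point, a 640×480 RGB565 frame is 640 × 480 × 2 = 614,400 bytes and the RGB24 output is 640 × 480 × 3 = 921,600 bytes. A minimal sketch of what such a conversion typically does follows; the byte order (high byte first) and the function name are assumptions, not the actual helper.

// Sketch of an RGB565 -> RGB24 expansion; byte order is assumed, check the real helper.
static byte[] Rgb565ToRgb24(byte[] src, int width, int height)
{
    var dst = new byte[width * height * 3];
    for (int i = 0, j = 0; i + 1 < src.Length && j + 2 < dst.Length; i += 2, j += 3)
    {
        ushort pixel = (ushort)((src[i] << 8) | src[i + 1]);
        byte r5 = (byte)((pixel >> 11) & 0x1F);
        byte g6 = (byte)((pixel >> 5) & 0x3F);
        byte b5 = (byte)(pixel & 0x1F);
        // Scale the 5/6-bit channels up to 8 bits by replicating the high bits.
        dst[j]     = (byte)((r5 << 3) | (r5 >> 2));
        dst[j + 1] = (byte)((g6 << 2) | (g6 >> 4));
        dst[j + 2] = (byte)((b5 << 3) | (b5 >> 2));
    }
    return dst;
}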
/// <summary>
/// Convert RGB image data to JPEG format
/// </summary>
private byte[] ConvertToJpeg(byte[] rgbData)
private async Task<byte[]> ConvertToJpeg(byte[] rgbData)
{
using var image = new Image<Rgb24>(_frameWidth, _frameHeight);
// Copy the RGB data into the ImageSharp image
for (int y = 0; y < _frameHeight; y++)
var jpegResult = Common.Image.ConvertRGB24ToJpeg(rgbData, _frameWidth, _frameHeight, 80);
if (!jpegResult.IsSuccessful)
{
for (int x = 0; x < _frameWidth; x++)
{
int index = (y * _frameWidth + x) * 3;
var pixel = new Rgb24(rgbData[index], rgbData[index + 1], rgbData[index + 2]);
image[x, y] = pixel;
}
logger.Error("RGB24转JPEG失败: {Error}", jpegResult.Error);
return new byte[0];
}
using var stream = new MemoryStream();
image.SaveAsJpeg(stream, new JpegEncoder { Quality = 80 });
return stream.ToArray();
return jpegResult.Value;
}
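Common.Image.ConvertRGB24ToJpeg is likewise not shown; judging from the ImageSharp code it replaces, it presumably wraps the same encode path. A sketch under that assumption (the helper's actual signature and result wrapper may differ; this uses the same using directives already at the top of the file):

// Sketch: raw RGB24 buffer -> JPEG bytes via ImageSharp, mirroring the code removed above.
static byte[] EncodeRgb24AsJpeg(byte[] rgbData, int width, int height, int quality)
{
    // LoadPixelData copies the packed RGB24 buffer into an Image<Rgb24> in one call,
    // replacing the per-pixel copy loop that this commit deletes.
    using var image = Image.LoadPixelData<Rgb24>(rgbData, width, height);
    using var stream = new MemoryStream();
    image.SaveAsJpeg(stream, new JpegEncoder { Quality = quality });
    return stream.ToArray();
}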
/// <summary>
@@ -417,9 +439,9 @@ public class HttpVideoStreamService : BackgroundService
return;
}
// Prepare the MJPEG frame data
var mjpegFrameHeader = $"--boundary\r\nContent-Type: image/jpeg\r\nContent-Length: {frameData.Length}\r\n\r\n";
var headerBytes = Encoding.ASCII.GetBytes(mjpegFrameHeader);
// Prepare the MJPEG frame data using the Common helpers
var mjpegFrameHeader = Common.Image.CreateMjpegFrameHeader(frameData.Length);
var mjpegFrameFooter = Common.Image.CreateMjpegFrameFooter();
var clientsToRemove = new List<HttpListenerResponse>();
var clientsToProcess = new List<HttpListenerResponse>();
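CreateMjpegFrameHeader and CreateMjpegFrameFooter live in Common and are not shown here; based on the inline string-building they replace, they presumably return the same boundary header and trailing CRLF as ASCII byte arrays, roughly:

// Sketch: byte-array equivalents of the inline MJPEG framing removed above (assumes using System.Text).
static byte[] CreateMjpegFrameHeader(int jpegLength) =>
    Encoding.ASCII.GetBytes(
        $"--boundary\r\nContent-Type: image/jpeg\r\nContent-Length: {jpegLength}\r\n\r\n");

static byte[] CreateMjpegFrameFooter() =>
    Encoding.ASCII.GetBytes("\r\n");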
@@ -441,13 +463,13 @@ public class HttpVideoStreamService : BackgroundService
try
{
// Send the frame header
await client.OutputStream.WriteAsync(headerBytes, 0, headerBytes.Length, cancellationToken);
await client.OutputStream.WriteAsync(mjpegFrameHeader, 0, mjpegFrameHeader.Length, cancellationToken);
// Send the JPEG data
await client.OutputStream.WriteAsync(frameData, 0, frameData.Length, cancellationToken);
// Send the trailing CRLF
await client.OutputStream.WriteAsync(Encoding.ASCII.GetBytes("\r\n"), 0, 2, cancellationToken);
await client.OutputStream.WriteAsync(mjpegFrameFooter, 0, mjpegFrameFooter.Length, cancellationToken);
// Make sure the data is flushed to the client immediately
await client.OutputStream.FlushAsync(cancellationToken);
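Not visible in this hunk is the one-time response setup that makes browsers treat the connection as a motion-JPEG stream. For an HttpListener-based server it typically looks like the sketch below; the header values follow standard MJPEG practice and are not taken from this file, and the boundary string must match the "--boundary" marker used in each frame header.

// Sketch: per-client response setup for an MJPEG stream over HttpListener.
static void PrepareMjpegResponse(HttpListenerResponse response)
{
    response.StatusCode = 200;
    response.ContentType = "multipart/x-mixed-replace; boundary=boundary";
    response.SendChunked = true;                      // stream indefinitely, no Content-Length
    response.Headers["Cache-Control"] = "no-cache";   // discourage proxies/browsers from buffering
}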