fix: resolve camera startup failure and backend crash when the camera is closed

This commit is contained in:
SikongJueluo 2025-08-18 19:14:02 +08:00
parent 7265b10870
commit 1b5b0e28e3
No known key found for this signature in database
8 changed files with 640 additions and 75 deletions

View File

@ -35,6 +35,7 @@
])
nuget
mono
vlc
# msbuild
omnisharp-roslyn
csharpier

View File

@ -19,6 +19,7 @@
<PackageReference Include="DotNext" Version="5.23.0" />
<PackageReference Include="DotNext.Threading" Version="5.23.0" />
<PackageReference Include="FlashCap" Version="1.11.0" />
<PackageReference Include="H264Sharp" Version="1.6.0" />
<PackageReference Include="Honoo.IO.Hashing.Crc" Version="1.3.3" />
<PackageReference Include="linq2db.AspNet" Version="5.4.1" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="9.0.7" />
@ -30,6 +31,7 @@
<PackageReference Include="NLog.Web.AspNetCore" Version="5.4.0" />
<PackageReference Include="NSwag.AspNetCore" Version="14.3.0" />
<PackageReference Include="NSwag.CodeGeneration.TypeScript" Version="14.4.0" />
<PackageReference Include="SharpRTSP" Version="1.8.2" />
<PackageReference Include="SixLabors.ImageSharp" Version="3.1.11" />
<PackageReference Include="System.Data.SQLite.Core" Version="1.0.119" />
<PackageReference Include="Tapper.Analyzer" Version="1.13.1">

View File

@ -146,7 +146,7 @@ public class VideoStreamController : ControllerBase
}
[HttpPost("SetVideoStreamEnable")]
[ProducesResponseType(typeof(object), StatusCodes.Status200OK)]
[ProducesResponseType(typeof(string), StatusCodes.Status200OK)]
[ProducesResponseType(typeof(string), StatusCodes.Status500InternalServerError)]
public async Task<IActionResult> SetVideoStreamEnable(bool enable)
{
@ -155,7 +155,7 @@ public class VideoStreamController : ControllerBase
var boardId = TryGetBoardId().OrThrow(() => new ArgumentException("Board ID is required"));
await _videoStreamService.SetVideoStreamEnableAsync(boardId.ToString(), enable);
return Ok($"HDMI transmission for board {boardId} disabled.");
return Ok($"HDMI transmission for board {boardId} {enable.ToString()}.");
}
catch (Exception ex)
{

View File

@ -6,6 +6,8 @@ public sealed class MsgBus
{
private static NLog.Logger logger = NLog.LogManager.GetCurrentClassLogger();
// private static RtspStreamService _rtspStreamService = new RtspStreamService(new UsbCameraCapture());
private static readonly UDPServer udpServer = new UDPServer(1234, 12);
/// <summary>
/// Gets the UDP server
@ -49,7 +51,7 @@ public sealed class MsgBus
/// Initializes the message bus
/// </summary>
/// <returns>None</returns>
public static void Init()
public static async void Init()
{
if (!ArpClient.IsAdministrator())
{
@ -57,6 +59,10 @@ public sealed class MsgBus
// throw new Exception($"非管理员运行ARP无法更新请用管理员权限运行");
}
udpServer.Start();
// _rtspStreamService.ConfigureVideo(1920, 1080, 30);
// await _rtspStreamService.StartAsync();
isRunning = true;
}
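Because `async void` cannot be awaited and lets startup exceptions escape onto the thread pool, a Task-returning variant is one way to host the commented-out RTSP startup once it is re-enabled. A minimal sketch, assuming the call sites of `MsgBus.Init` can be updated to await it:

    // Sketch only: assumes Init's callers can await a Task, and that
    // _rtspStreamService is the RtspStreamService field commented out above.
    public static async Task InitAsync()
    {
        udpServer.Start();

        // _rtspStreamService.ConfigureVideo(1920, 1080, 30);
        // await _rtspStreamService.StartAsync();
        await Task.CompletedTask; // placeholder while the RTSP startup stays disabled

        isRunning = true;
    }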

View File

@ -3,7 +3,6 @@ using System.Text;
using System.Collections.Concurrent;
using DotNext;
using DotNext.Threading;
using FlashCap;
namespace server.Services;
@ -108,7 +107,7 @@ public class HttpVideoStreamService : BackgroundService
var devices = camera.GetDevices();
for (int i = 0; i < devices.Count; i++)
logger.Info($"Device[{i}]: {devices[i].Name}");
await camera.StartAsync(1, 3840, 2160, 30);
await camera.StartAsync(1, 2592, 1994, 30);
return camera;
}
catch (Exception ex)
@ -120,14 +119,11 @@ public class HttpVideoStreamService : BackgroundService
private Optional<VideoStreamClient> TryGetClient(string boardId)
{
if (_clientDict.TryGetValue(boardId, out var client))
{
return client;
}
return null;
return _clientDict.TryGetValue(boardId, out var client) ? client : null;
}
private Optional<VideoStreamClient> GetOrCreateClient(string boardId, int initWidth, int initHeight)
private Optional<VideoStreamClient> GetOrCreateClient(
string boardId, int initWidth, int initHeight)
{
if (_clientDict.TryGetValue(boardId, out var client))
{
@ -185,6 +181,8 @@ public class HttpVideoStreamService : BackgroundService
{
var client = _clientDict[clientKey];
client.CTS.Cancel();
if (!client.Camera.IsValueCreated) continue;
using (await client.Lock.AcquireWriteLockAsync(cancellationToken))
{
var camera = await client.Camera.WithCancellation(cancellationToken);
@ -251,26 +249,28 @@ public class HttpVideoStreamService : BackgroundService
}
var client = clientOpt.Value;
var token = CancellationTokenSource.CreateLinkedTokenSource(
client.CTS.Token, cancellationToken).Token;
var clientToken = client.CTS.Token;
try
{
token.ThrowIfCancellationRequested();
logger.Info("新HTTP客户端连接: {RemoteEndPoint}", context.Request.RemoteEndPoint);
if (path == "/video")
{
// MJPEG stream request (FPGA)
await HandleMjpegStreamAsync(context.Response, client, cancellationToken);
await HandleMjpegStreamAsync(context.Response, client, token);
}
else if (path == "/usbCamera")
{
// USB camera MJPEG stream request
await HandleUsbCameraStreamAsync(context.Response, client, cancellationToken);
await HandleUsbCameraStreamAsync(context.Response, client, token);
}
else if (path == "/snapshot")
{
// Single-frame snapshot request
await HandleSnapshotRequestAsync(context.Response, client, cancellationToken);
await HandleSnapshotRequestAsync(context.Response, client, token);
}
else if (path == "/html")
{
@ -300,10 +300,26 @@ public class HttpVideoStreamService : BackgroundService
private async Task HandleUsbCameraStreamAsync(
HttpListenerResponse response, VideoStreamClient client, CancellationToken cancellationToken)
{
var camera = await _usbCamera.WithCancellation(cancellationToken);
Action<byte[]> frameHandler = async (jpegData) =>
{
try
{
var header = Encoding.ASCII.GetBytes("--boundary\r\nContent-Type: image/jpeg\r\nContent-Length: " + jpegData.Length + "\r\n\r\n");
await response.OutputStream.WriteAsync(header, 0, header.Length, cancellationToken);
await response.OutputStream.WriteAsync(jpegData, 0, jpegData.Length, cancellationToken);
await response.OutputStream.WriteAsync(new byte[] { 0x0D, 0x0A }, 0, 2, cancellationToken); // \r\n
await response.OutputStream.FlushAsync(cancellationToken);
}
catch
{
logger.Error("Error sending MJPEG frame");
}
};
try
{
var camera = await _usbCamera.WithCancellation(cancellationToken);
if (!camera.IsCapturing)
{
logger.Error("USB Camera is not capturing");
@ -320,32 +336,17 @@ public class HttpVideoStreamService : BackgroundService
logger.Info("Start USB Camera MJPEG Stream");
camera.FrameReady += frameHandler;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var jpegData = camera.GetLatestFrame();
if (jpegData == null)
{
logger.Warn("USB Camera MJPEG帧获取失败");
await Task.Delay(1000 / client.FrameRate, cancellationToken);
continue;
}
// MJPEG frame header
var header = Encoding.ASCII.GetBytes("--boundary\r\nContent-Type: image/jpeg\r\nContent-Length: " + jpegData.Length + "\r\n\r\n");
await response.OutputStream.WriteAsync(header, 0, header.Length, cancellationToken);
await response.OutputStream.WriteAsync(jpegData, 0, jpegData.Length, cancellationToken);
await response.OutputStream.WriteAsync(new byte[] { 0x0D, 0x0A }, 0, 2, cancellationToken); // \r\n
await response.OutputStream.FlushAsync(cancellationToken);
await Task.Delay(1000 / client.FrameRate, cancellationToken);
logger.Info("USB Camera MJPEG帧发送成功");
await Task.Delay(-1, cancellationToken);
}
}
catch (OperationCanceledException ex)
catch (OperationCanceledException)
{
logger.Info(ex, "USB Camera MJPEG 串流取消");
logger.Info("USB Camera MJPEG 串流取消");
}
catch (Exception ex)
{
@ -353,7 +354,8 @@ public class HttpVideoStreamService : BackgroundService
}
finally
{
camera.FrameReady -= frameHandler;
logger.Info("Usb Camera Stream Stopped");
try { response.Close(); } catch { }
}
}
@ -744,15 +746,14 @@ public class HttpVideoStreamService : BackgroundService
using (await client.Lock.AcquireWriteLockAsync())
{
if (enable)
{
client.CTS = new CancellationTokenSource();
}
else
if (!enable || client.CTS.IsCancellationRequested)
{
client.CTS.Cancel();
client.CTS = new CancellationTokenSource();
}
if (!client.Camera.IsValueCreated) return;
var camera = await client.Camera.WithCancellation(client.CTS.Token);
var disableResult = await camera.EnableHardwareTrans(enable);
if (disableResult.IsSuccessful && disableResult.Value)
@ -763,7 +764,7 @@ public class HttpVideoStreamService : BackgroundService
}
catch (Exception ex)
{
logger.Error(ex, $"Exception occurred while disabling HDMI transmission for camera {boardId}");
logger.Error(ex, $"Exception occurred while disabling video transmission for {boardId}");
}
}
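The crash fix above relies on two details: a CancellationTokenSource that has been cancelled can never be reset, so a fresh instance must be created before the stream can run again, and the lazily created camera must only be touched when `IsValueCreated` is true. A minimal illustration of the first point (standalone snippet, not part of this commit):

    var cts = new CancellationTokenSource();
    cts.Cancel();
    Console.WriteLine(cts.IsCancellationRequested); // True; a cancelled CTS cannot be un-cancelled
    cts.Dispose();
    cts = new CancellationTokenSource();            // a fresh source is required for the next run
    Console.WriteLine(cts.IsCancellationRequested); // False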

View File

@ -0,0 +1,576 @@
using System.Net;
using System.Net.Sockets;
using System.Collections.Concurrent;
using System.Text;
using Rtsp;
using Rtsp.Messages;
using Rtsp.Sdp;
using server.Services;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
namespace server.Services;
/// <summary>
/// RTSP streaming service that integrates with UsbCameraCapture
/// Uses simplified RTSP server architecture with RTSPDispatcher
/// Provides Motion JPEG stream over RTP/RTSP
/// Compatible with Windows and Linux
/// </summary>
public class RtspStreamService : IDisposable
{
private static readonly NLog.Logger logger = NLog.LogManager.GetCurrentClassLogger();
private readonly UsbCameraCapture _cameraCapture;
private readonly ConcurrentDictionary<string, RtspListener> _activeListeners = new();
// RTSP configuration
private readonly int _rtspPort;
private readonly string _streamPath;
private TcpListener? _rtspServerListener;
private ManualResetEvent? _stopping;
private Thread? _listenThread;
// Video encoding parameters
private int _videoWidth = 640;
private int _videoHeight = 480;
private int _frameRate = 30;
private int _jpegQuality = 75;
private bool _isStreaming;
private bool _disposed;
// Frame timing and RTP sequencing
private DateTime _lastFrameTime = DateTime.UtcNow;
private readonly TimeSpan _frameInterval;
private uint _rtpTimestamp = 0;
private ushort _sequenceNumber = 0;
private readonly uint _ssrc = (uint)Random.Shared.Next();
// Current frame data for broadcasting
private byte[]? _currentFrame;
private readonly object _frameLock = new object();
public event Action<Exception>? Error;
public event Action<string>? StatusChanged;
public bool IsStreaming => _isStreaming;
public int Port => _rtspPort;
public string StreamUrl => $"rtsp://localhost:{_rtspPort}/{_streamPath}";
public int ActiveSessions => _activeListeners.Count;
public RtspStreamService(UsbCameraCapture cameraCapture, int port = 8554, string streamPath = "camera")
{
_cameraCapture = cameraCapture ?? throw new ArgumentNullException(nameof(cameraCapture));
_rtspPort = port;
_streamPath = streamPath;
_frameInterval = TimeSpan.FromSeconds(1.0 / _frameRate);
// Register RTSP URI scheme
RtspUtils.RegisterUri();
// Subscribe to camera events
_cameraCapture.FrameReady += OnFrameReady;
_cameraCapture.Error += OnCameraError;
}
/// <summary>
/// Configure video encoding parameters
/// </summary>
public void ConfigureVideo(int width, int height, int frameRate, int jpegQuality = 75)
{
if (_isStreaming)
throw new InvalidOperationException("Cannot configure video while streaming");
_videoWidth = width;
_videoHeight = height;
_frameRate = frameRate;
_jpegQuality = jpegQuality;
logger.Info($"Video configured: {width}x{height} @ {frameRate}fps, JPEG quality {jpegQuality}%");
}
/// <summary>
/// Start RTSP server and begin streaming
/// </summary>
public async Task StartAsync()
{
if (_isStreaming)
return;
try
{
// Validate port range
if (_rtspPort < IPEndPoint.MinPort || _rtspPort > IPEndPoint.MaxPort)
throw new ArgumentOutOfRangeException(nameof(_rtspPort), _rtspPort, "Port number must be between System.Net.IPEndPoint.MinPort and System.Net.IPEndPoint.MaxPort");
// Initialize RTSP server
_rtspServerListener = new TcpListener(IPAddress.Any, _rtspPort);
_rtspServerListener.Start();
// Start listening for connections
_stopping = new ManualResetEvent(false);
_listenThread = new Thread(AcceptConnections)
{
Name = "RTSP-Listener",
IsBackground = true
};
_listenThread.Start();
// Start camera capture if not already running
if (!_cameraCapture.IsCapturing)
{
await _cameraCapture.StartAsync(1, _videoWidth, _videoHeight, _frameRate);
}
_isStreaming = true;
StatusChanged?.Invoke("Streaming started");
logger.Info($"RTSP stream started on {StreamUrl}");
}
catch (Exception ex)
{
await StopAsync();
Error?.Invoke(ex);
throw;
}
}
/// <summary>
/// Stop RTSP server and streaming
/// </summary>
public async Task StopAsync()
{
if (!_isStreaming)
return;
_isStreaming = false;
try
{
// Signal stop and wait for listen thread
_stopping?.Set();
if (_listenThread != null && _listenThread.IsAlive)
{
_listenThread.Join(TimeSpan.FromSeconds(5));
}
// Stop RTSP server
_rtspServerListener?.Stop();
// Clean up active listeners
foreach (var listener in _activeListeners.Values.ToArray())
{
try
{
listener.Stop();
}
catch (Exception ex)
{
logger.Warn(ex, "Error stopping RTSP listener");
}
}
_activeListeners.Clear();
StatusChanged?.Invoke("Streaming stopped");
logger.Info("RTSP stream stopped");
}
catch (Exception ex)
{
Error?.Invoke(ex);
}
await Task.CompletedTask;
}
/// <summary>
/// Get current stream statistics
/// </summary>
public StreamStats GetStats()
{
return new StreamStats
{
IsStreaming = _isStreaming,
ActiveSessions = _activeListeners.Count,
VideoWidth = _videoWidth,
VideoHeight = _videoHeight,
FrameRate = _frameRate,
StreamUrl = StreamUrl
};
}
/// <summary>
/// Accept incoming RTSP connections
/// </summary>
private void AcceptConnections()
{
try
{
while (!(_stopping?.WaitOne(0) ?? true))
{
TcpClient client = _rtspServerListener!.AcceptTcpClient();
var transport = new RtspTcpTransport(client);
var listener = new RtspListener(transport);
var listenerId = Guid.NewGuid().ToString();
_activeListeners[listenerId] = listener;
// Handle listener events
listener.MessageReceived += (sender, args) => HandleRtspMessage(listenerId, args);
// Store listener for later cleanup
// We'll rely on exception handling to detect disconnections
// Start the listener
listener.Start();
logger.Info($"New RTSP client connected: {listenerId} from {client.Client.RemoteEndPoint}");
}
}
catch (SocketException ex)
{
if (_isStreaming) // Only log if we're still supposed to be running
{
logger.Warn(ex, "Socket error while accepting connections (may be normal during shutdown)");
}
}
catch (Exception ex)
{
if (_isStreaming)
{
logger.Error(ex, "Error accepting RTSP connections");
Error?.Invoke(ex);
}
}
}
/// <summary>
/// Handle RTSP messages from clients
/// </summary>
private void HandleRtspMessage(string listenerId, RtspChunkEventArgs args)
{
try
{
if (args.Message is RtspRequest request)
{
HandleRtspRequest(listenerId, request);
}
}
catch (Exception ex)
{
logger.Error(ex, $"Error handling RTSP message for listener {listenerId}");
}
}
/// <summary>
/// Handle RTSP requests
/// </summary>
private void HandleRtspRequest(string listenerId, RtspRequest request)
{
if (!_activeListeners.TryGetValue(listenerId, out var listener))
return;
var response = new RtspResponse();
response.OriginalRequest = request;
// 1. Echo the CSeq header
if (request.Headers.TryGetValue("CSeq", out var cseq))
{
response.Headers["CSeq"] = cseq;
}
switch (request.RequestTyped)
{
case RtspRequest.RequestType.OPTIONS:
response.Headers["Public"] = "DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE";
response.ReturnCode = 200;
break;
case RtspRequest.RequestType.DESCRIBE:
if (request.RtspUri?.AbsolutePath.TrimStart('/') == _streamPath)
{
var sdp = CreateSdp();
response.Headers["Content-Type"] = "application/sdp";
response.Data = Encoding.UTF8.GetBytes(sdp);
response.ReturnCode = 200;
}
else
{
response.ReturnCode = 404;
}
break;
case RtspRequest.RequestType.SETUP:
// 2. Parse the client's Transport header
string clientTransport = request.Headers.TryGetValue("Transport", out var transport) ? transport : "";
string serverTransport;
if (clientTransport.Contains("TCP", StringComparison.OrdinalIgnoreCase) || clientTransport.Contains("interleaved"))
{
// Client requested TCP
serverTransport = "RTP/AVP/TCP;unicast;interleaved=0-1";
}
else if (clientTransport.Contains("UDP", StringComparison.OrdinalIgnoreCase) || clientTransport.Contains("client_port"))
{
// Client requested UDP
// Assumes the port numbers come as client_port=xxxx-xxxx
var match = System.Text.RegularExpressions.Regex.Match(clientTransport, @"client_port=(\d+)-(\d+)");
if (match.Success)
{
var clientPort1 = match.Groups[1].Value;
var clientPort2 = match.Groups[2].Value;
// The server_port values can be customized
serverTransport = $"RTP/AVP;unicast;client_port={clientPort1}-{clientPort2};server_port=9000-9001";
}
else
{
// Default to UDP
serverTransport = "RTP/AVP;unicast;client_port=8000-8001;server_port=9000-9001";
}
}
else
{
// Default to TCP
serverTransport = "RTP/AVP/TCP;unicast;interleaved=0-1";
}
response.Headers["Transport"] = serverTransport;
response.Headers["Session"] = listenerId;
response.ReturnCode = 200;
break;
case RtspRequest.RequestType.PLAY:
response.Headers["Session"] = listenerId;
response.ReturnCode = 200;
// Start sending frames to this client
StartFrameBroadcastForListener(listenerId);
break;
case RtspRequest.RequestType.TEARDOWN:
response.ReturnCode = 200;
// Stop and remove the listener
Task.Run(() =>
{
listener.Stop();
_activeListeners.TryRemove(listenerId, out _);
});
break;
default:
response.ReturnCode = 501; // Not implemented
break;
}
// Send response
try
{
listener.SendMessage(response);
}
catch (Exception ex)
{
logger.Error(ex, $"Error sending RTSP response to listener {listenerId}");
}
}
/// <summary>
/// Create SDP description for the stream
/// </summary>
private string CreateSdp()
{
var sessionId = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
return $@"v=0
o=- {sessionId} {sessionId} IN IP4 127.0.0.1
s=FPGA WebLab Camera Stream
c=IN IP4 0.0.0.0
t=0 0
m=video 0 RTP/AVP 26
a=rtpmap:26 JPEG/90000
a=control:track1
a=framerate:{_frameRate}";
}
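// Worked example of the 90 kHz clock advertised above: at the default 30 fps each
// frame advances the RTP timestamp by 90000 / 30 = 3000 ticks, matching the
// `_rtpTimestamp += (uint)(90000 / _frameRate)` increment used in BroadcastFrame below.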
/// <summary>
/// Start broadcasting frames to a specific listener
/// </summary>
private void StartFrameBroadcastForListener(string listenerId)
{
// For now, we'll use a simple approach where we send the current frame
// In a full implementation, you'd want to manage RTP streaming per client
lock (_frameLock)
{
if (_currentFrame != null && _activeListeners.TryGetValue(listenerId, out var listener))
{
try
{
// Send current frame (simplified - in real implementation you'd send RTP packets)
// This is a placeholder for actual RTP packet creation and sending
logger.Debug($"Started frame broadcast for listener {listenerId}");
}
catch (Exception ex)
{
logger.Error(ex, $"Error starting frame broadcast for listener {listenerId}");
}
}
}
}
/// <summary>
/// Handle new frame from camera
/// </summary>
private void OnFrameReady(byte[] frameData)
{
if (!_isStreaming || frameData == null || _activeListeners.IsEmpty)
return;
try
{
// Throttle frame rate
var now = DateTime.UtcNow;
if (now - _lastFrameTime < _frameInterval)
return;
_lastFrameTime = now;
// Process and encode frame
var processedFrame = ProcessFrame(frameData);
if (processedFrame != null)
{
lock (_frameLock)
{
_currentFrame = processedFrame;
}
BroadcastFrame(processedFrame);
}
}
catch (Exception ex)
{
logger.Error(ex, "Error processing camera frame");
Error?.Invoke(ex);
}
}
/// <summary>
/// Process raw frame data
/// </summary>
private byte[]? ProcessFrame(byte[] frameData)
{
try
{
// Convert frame to JPEG for Motion JPEG streaming
using var image = Image.Load<Rgb24>(frameData);
// Resize if necessary
if (image.Width != _videoWidth || image.Height != _videoHeight)
{
image.Mutate(x => x.Resize(_videoWidth, _videoHeight));
}
// Encode as JPEG with specified quality
using var stream = new MemoryStream();
image.SaveAsJpeg(stream, new SixLabors.ImageSharp.Formats.Jpeg.JpegEncoder
{
Quality = _jpegQuality
});
return stream.ToArray();
}
catch (Exception ex)
{
logger.Error(ex, "Error processing frame");
return null;
}
}
/// <summary>
/// Broadcast frame to all active listeners
/// </summary>
private void BroadcastFrame(byte[] frameData)
{
if (_activeListeners.IsEmpty)
return;
var timestamp = _rtpTimestamp;
_rtpTimestamp += (uint)(90000 / _frameRate); // 90kHz clock
var sequenceNumber = ++_sequenceNumber;
var listenersToRemove = new List<string>();
foreach (var kvp in _activeListeners)
{
try
{
var listener = kvp.Value;
// Try to send data to test if listener is still active
// In a full implementation, you would create and send RTP packets here
// For now, this is a placeholder that just checks if we can access the listener
try
{
var _ = listener.RemoteEndPoint; // Test if listener is still valid
// SendRtpFrame(listener, frameData, timestamp, sequenceNumber, _ssrc);
}
catch
{
listenersToRemove.Add(kvp.Key);
}
}
catch (Exception ex)
{
logger.Warn(ex, $"Error sending frame to listener {kvp.Key}");
listenersToRemove.Add(kvp.Key);
}
}
// Remove failed listeners
foreach (var listenerId in listenersToRemove)
{
if (_activeListeners.TryRemove(listenerId, out var listener))
{
try
{
listener.Stop();
}
catch (Exception ex)
{
logger.Warn(ex, $"Error stopping failed listener {listenerId}");
}
}
}
}
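// A sketch of the RTP packetization this placeholder elides (RFC 3550 fixed header
// followed by the RFC 2435 JPEG payload header). Assumptions, not part of this commit:
// the frame fits in a single packet, and `scanData` holds only the entropy-coded JPEG
// scan bytes that RFC 2435 expects (not a complete JFIF file), with quantization
// tables derived from the Q value on the receiver side.
private static byte[] BuildJpegRtpPacket(
    byte[] scanData, ushort sequenceNumber, uint timestamp, uint ssrc,
    int width, int height, byte quality)
{
    var packet = new byte[12 + 8 + scanData.Length];

    // RTP fixed header (12 bytes)
    packet[0] = 0x80;                               // V=2, no padding, no extension, CC=0
    packet[1] = 0x80 | 26;                          // marker bit (last packet of frame), PT=26 (JPEG)
    packet[2] = (byte)(sequenceNumber >> 8);
    packet[3] = (byte)sequenceNumber;
    packet[4] = (byte)(timestamp >> 24);
    packet[5] = (byte)(timestamp >> 16);
    packet[6] = (byte)(timestamp >> 8);
    packet[7] = (byte)timestamp;
    packet[8] = (byte)(ssrc >> 24);
    packet[9] = (byte)(ssrc >> 16);
    packet[10] = (byte)(ssrc >> 8);
    packet[11] = (byte)ssrc;

    // RFC 2435 JPEG payload header (8 bytes)
    packet[12] = 0;                                 // type-specific
    packet[13] = 0; packet[14] = 0; packet[15] = 0; // fragment offset (single fragment)
    packet[16] = 1;                                 // type 1 = 4:2:0 chroma subsampling
    packet[17] = quality;                           // Q in 1..99: tables derived from Q
    packet[18] = (byte)(width / 8);
    packet[19] = (byte)(height / 8);

    Buffer.BlockCopy(scanData, 0, packet, 20, scanData.Length);
    return packet;
}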
/// <summary>
/// Handle camera capture errors
/// </summary>
private void OnCameraError(Exception error)
{
logger.Error(error, "Camera capture error");
Error?.Invoke(error);
}
public void Dispose()
{
if (_disposed) return;
StopAsync().Wait();
_cameraCapture.FrameReady -= OnFrameReady;
_cameraCapture.Error -= OnCameraError;
_rtspServerListener?.Stop();
_stopping?.Dispose();
_disposed = true;
}
}
/// <summary>
/// Stream statistics data structure
/// </summary>
public class StreamStats
{
public bool IsStreaming { get; set; }
public int ActiveSessions { get; set; }
public int VideoWidth { get; set; }
public int VideoHeight { get; set; }
public int FrameRate { get; set; }
public string StreamUrl { get; set; } = string.Empty;
}
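The commented-out lines in MsgBus above hint at how this service is meant to be wired up. A minimal usage sketch against the public surface defined in this file (the 1920x1080 @ 30 fps parameters mirror the commented-out MsgBus call; everything else is illustrative):

    var capture = new UsbCameraCapture();
    using var rtsp = new RtspStreamService(capture, port: 8554, streamPath: "camera");

    rtsp.StatusChanged += status => Console.WriteLine($"RTSP: {status}");
    rtsp.Error += ex => Console.WriteLine($"RTSP error: {ex.Message}");

    rtsp.ConfigureVideo(1920, 1080, 30);   // must be called before StartAsync
    await rtsp.StartAsync();               // starts the TCP listener and the camera capture

    Console.WriteLine($"Streaming at {rtsp.StreamUrl}");   // rtsp://localhost:8554/camera

    // ... later, on shutdown ...
    await rtsp.StopAsync();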

View File

@ -1,4 +1,3 @@
// using System.Drawing;
using FlashCap;
namespace server.Services;
@ -70,7 +69,8 @@ public class UsbCameraCapture : IDisposable
{
_descriptor = descriptor;
_characteristics = characteristics;
_device = await descriptor.OpenAsync(characteristics, OnFrameCaptured);
_device = await descriptor.OpenAsync(
characteristics, TranscodeFormats.DoNotTranscode, true, 10, OnFrameCaptured);
await _device.StartAsync();
_isCapturing = true;
@ -104,26 +104,6 @@ public class UsbCameraCapture : IDisposable
return _latestFrame;
}
// /// <summary>
// /// Get latest frame as bitmap
// /// </summary>
// public Bitmap? GetLatestFrameAsBitmap()
// {
// var frameData = _latestFrame;
// if (frameData == null)
// return null;
// try
// {
// using var ms = new MemoryStream(frameData);
// return new Bitmap(ms);
// }
// catch
// {
// return null;
// }
// }
/// <summary>
/// Get supported video characteristics for current device
/// </summary>
@ -170,7 +150,6 @@ public class UsbCameraCapture : IDisposable
private void OnFrameCaptured(PixelBufferScope bufferScope)
{
logger.Info("Frame captured");
if (!_isCapturing)
return;
@ -180,6 +159,7 @@ public class UsbCameraCapture : IDisposable
var imageData = bufferScope.Buffer.CopyImage();
_latestFrame = imageData;
FrameReady?.Invoke(imageData);
// logger.Info("USB Camera frame captured");
}
catch (Exception ex)
{

View File

@ -387,8 +387,6 @@ import { VideoStreamClient, ResolutionConfigRequest } from "@/APIClient";
import { useEquipments } from "@/stores/equipments";
import { AuthManager } from "@/utils/AuthManager";
const eqps = useEquipments();
//
const loading = ref(false);
const configing = ref(false);
@ -510,7 +508,7 @@ const toggleStreamType = async () => {
"success",
`已切换到${streamType.value === "usbCamera" ? "USB摄像头" : "视频流"}`,
);
stopStream();
await stopStream();
} catch (error) {
addLog("error", `切换视频流类型失败: ${error}`);
console.error("切换视频流类型失败:", error);
@ -647,7 +645,8 @@ const tryReconnect = () => {
//
const performFocus = async () => {
if (isFocusing.value || !isPlaying.value) return;
if (isFocusing.value || !isPlaying.value || streamType.value === "usbCamera")
return;
try {
isFocusing.value = true;
@ -711,7 +710,7 @@ const startStream = async () => {
try {
addLog("info", "正在启动视频流...");
videoStatus.value = "正在连接视频流...";
videoClient.setVideoStreamEnable(true);
await videoClient.setVideoStreamEnable(true);
//
await refreshStatus();
@ -778,7 +777,7 @@ const changeResolution = async () => {
//
if (wasPlaying) {
stopStream();
await stopStream();
await new Promise((resolve) => setTimeout(resolve, 1000)); // wait 1 second
}
@ -815,10 +814,10 @@ const changeResolution = async () => {
};
//
const stopStream = () => {
const stopStream = async () => {
try {
addLog("info", "正在停止视频流...");
videoClient.setVideoStreamEnable(false);
await videoClient.setVideoStreamEnable(false);
//
currentVideoSource.value = "";