
New requirement from Xiamen: exclude line segments that lie inside the manually identified data

ZIM, 1 year ago
Parent
Commit
7ac154dc95

+ 46 - 39
Assets/InfraredProject/WebCamera/Script/ZIM/InfraredLocate/ScreenIdentification.cs

@@ -87,6 +87,9 @@ namespace o0.Project
 
         public void SetScreenQuad(QuadrilateralInCamera quad) => Screen.QuadInCamera = quad;
 
+        // State of the last semi-automatic identification; false means that edge failed to identify. Segment order: bottom, right, top, left
+        public bool[] LastQuadSemiAutoState;
+
         public event Action OnLocateScreenEnter;
         public event Action OnLocateScreenEnd;
         public bool bStartLocateScreen { get; set; } = false;    // whether capture is in progress
@@ -146,7 +149,8 @@ namespace o0.Project
         /// Whether the two values capture and delay have been initialized
         /// </summary>
         /// <returns></returns>
-        public bool isInitLocateScreen() {
+        public bool isInitLocateScreen()
+        {
             return capture != 0 && delay != 0;
         }
 
@@ -167,28 +171,28 @@ namespace o0.Project
                     new QuadrilateralInCamera(quad, images[0].Size().o0Vector()));
 
                 // Perspective transform
-//                var srcWidth = LocateLightedRedTex.width;
-//                var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
-//                var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
-//                var transformTex = new Texture2D(transformWidth, transformHeight);
-//                var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
-//                var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
-//                var srcPixel = LocateLightedRedTex.GetPixels();
-//                Parallel.For(0, transformWidth, (x) =>
-//                {
-//                    for (int y = 0; y < transformHeight; y++)
-//                    {
-//                        var index = y * transformWidth + x;
-//                        var sampleCoord = pt.TransformRound(x, y);
-//                        dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
-//                    }
-//                });
-//                transformTex.SetPixels(dstPixel);
-//                transformTex.Apply();
-//                //ScreenLocate.DebugTexture(1, transformTex);
-//#if (!NDEBUG && DEBUG && ENABLE_LOG)
-//                Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
-//#endif
+                //                var srcWidth = LocateLightedRedTex.width;
+                //                var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
+                //                var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
+                //                var transformTex = new Texture2D(transformWidth, transformHeight);
+                //                var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
+                //                var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
+                //                var srcPixel = LocateLightedRedTex.GetPixels();
+                //                Parallel.For(0, transformWidth, (x) =>
+                //                {
+                //                    for (int y = 0; y < transformHeight; y++)
+                //                    {
+                //                        var index = y * transformWidth + x;
+                //                        var sampleCoord = pt.TransformRound(x, y);
+                //                        dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
+                //                    }
+                //                });
+                //                transformTex.SetPixels(dstPixel);
+                //                transformTex.Apply();
+                //                //ScreenLocate.DebugTexture(1, transformTex);
+                //#if (!NDEBUG && DEBUG && ENABLE_LOG)
+                //                Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
+                //#endif
             }
 
             //times.Add(watch.ElapsedMilliseconds);
@@ -669,7 +673,7 @@ namespace o0.Project
             sw.Start();
 
             // Read the data
-            if (debugImages != null && debugImages.Count != 0) 
+            if (debugImages != null && debugImages.Count != 0)
             {
                 foreach (var i in debugImages)
                 {
@@ -693,7 +697,7 @@ namespace o0.Project
                 var whitePixel = new UnityEngine.Color[Size.x * Size.y];
                 Parallel.For(0, Size.x, x =>
                 {
-                    for (int y = 0; y < Size.y; y++) 
+                    for (int y = 0; y < Size.y; y++)
                     {
                         var i = y * Size.x + x;
                         var d = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
@@ -754,7 +758,7 @@ namespace o0.Project
 
             #region Semi-automatic identification
             List<LineIdentified> LineIdentifiedSemiAuto = new List<LineIdentified>();               // Segment order: bottom, right, top, left
-            bool[] newLines = new bool[4] { true, true, true, true };
+            LastQuadSemiAutoState = new bool[4] { true, true, true, true };
             for (int i = 0; i < 4; i++)
             {
                 if (quadLinesSemiAuto[i] != null)
@@ -762,7 +766,7 @@ namespace o0.Project
                 else if (manualLines != null)
                 {
                     LineIdentifiedSemiAuto.Add(manualLines[i]);
-                    newLines[i] = false;
+                    LastQuadSemiAutoState[i] = false;
                 }
             }
 
@@ -788,7 +792,8 @@ namespace o0.Project
             {
                 Debug.Log("<color=aqua>[ScreenIdentification] 识别到四边形</color>");
                 quadTemp.Add(QuadSemiAuto.Quad);
-            }else if (QuadAuto != null)
+            }
+            else if (QuadAuto != null)
             {
                 Debug.Log("<color=aqua>[ScreenIdentification] 识别到四边形</color>");
                 quadTemp.Add(QuadAuto.Quad);
@@ -799,7 +804,7 @@ namespace o0.Project
             var ScreenQuadMap = new Matrix(Size, Tiling: true);     // The identified screen quadrilateral (semi-automatic and automatic drawn on one map)
             foreach (var i in LineIdentifiedSemiAuto.Index())
             {
-                if (newLines[i])
+                if (LastQuadSemiAutoState[i])
                     o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 2, new Geometry2D.Float.Vector(0, 10));
                 else
                     o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 1, new Geometry2D.Float.Vector(0, 6), true);
@@ -845,7 +850,7 @@ namespace o0.Project
             log += $"\r\n屏幕四边形_手动识别{QuadManual != null}\r\n屏幕四边形_半自动识别{QuadSemiAuto != null}\r\n屏幕四边形_全自动识别{QuadAuto != null}";
             Debug.Log(log);
             // Whether to save the images locally
-            if (ScreenLocate.Main.SaveToggle.isOn && ScreenLocate.Main.DebugOnZIMDemo)
+            if ((ScreenLocate.Main.SaveToggle?.isOn ?? false) && ScreenLocate.Main.DebugOnZIMDemo)
             {
                 var FileDirectory = $"Debug_屏幕定位/";
                 SaveImages(FileDirectory, log, ScreenLocateTexture, allLinesTex, ChoosableLineTex, ScreenQuad);
@@ -996,24 +1001,26 @@ namespace o0.Project
             var quadLinesSemiAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
             var quadLinesAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
             possibleLines = new List<LineIdentified>();
-            manualLines = null;
 
             // If manual location data already exists, filter the line segments against it (semi-automatic)
+            manualLines = null;
             if (QuadManual != null)
             {
                 Debug.Log("[IdentifyLineLSD] 根据已有定位数据做筛选");
-                var calibration = ScreenLocate.Main.ReDoLocateCalibrationRatio * Size.y;
                 manualLines = QuadManual.GetLines().Select((i) => new LineIdentified(0, i, 0, 0, true)).ToArray();
-
-                var pedals = manualLines.Select((i) => o0Extension.PointPedal(i.Line, avgPoint, out _)).ToArray();     // perpendicular feet of the current location: bottom, right, top, left
+                var calibration = ScreenLocate.Main.ReDoLocateCalibrationRatio * Size.y;
+                var avgPointCross = manualLines.Select((i) => i.Line.LineCrossWithPoint(avgPoint)).ToArray();     // cross value relative to the average point
+                var avgPointPedal = manualLines.Select((i) => o0Extension.PointPedal(i.Line, avgPoint, out _)).ToArray();     // perpendicular feet of the current location: bottom, right, top, left
 
                 foreach (var line in innerLines)
                 {
-                    // Filter conditions: 1 - gradient direction matches, 2 - perpendicular-foot distance is close enough, 3 - the perpendicular foot of the new segment's midpoint onto the old segment must lie within the old segment
+                    // Filter conditions: 1 - gradient direction matches, 2 - perpendicular-foot distance is close enough, 3 - both endpoints A and B of the segment lie outside the old segment, 4 - the perpendicular foot of the new segment's midpoint onto the old segment must lie within the old segment
                     if (isScreenLine(line, out int index))
                     {
-                        var distanceToOld = (o0Extension.PointPedal(line.Line, avgPoint, out _) - pedals[index]).Length;
-                        if (distanceToOld < calibration)
+                        var distanceToOld = (o0Extension.PointPedal(line.Line, avgPoint, out _) - avgPointPedal[index]).Length;
+                        if (distanceToOld < calibration &&
+                            manualLines[index].Line.LineCrossWithPoint(line.Line.A) * avgPointCross[index] <= 0 &&
+                            manualLines[index].Line.LineCrossWithPoint(line.Line.B) * avgPointCross[index] <= 0)
                         {
                             var middleToOldLine = o0Extension.PointPedal(manualLines[index].Line, (line.Line.A + line.Line.B) / 2, out bool inLineSegment);
                             if (inLineSegment)
@@ -1029,7 +1036,7 @@ namespace o0.Project
             // Fully automatic
             foreach (var line in allLines)
             {
-                if (isScreenLine(line , out int index))
+                if (isScreenLine(line, out int index))
                 {
                     if (line.Batch < 1)     // Fully automatic mode only processes the first image, by default the color-difference image
                     {
@@ -1050,7 +1057,7 @@ namespace o0.Project
             return (resultSemiAuto.ToList(), resultAuto.ToList());
         }
 
-        void SaveImages(string FileDirectory, string log, 
+        void SaveImages(string FileDirectory, string log,
             Texture2D ScreenLocateTex, Texture2D allLinesTex, Texture2D ChoosableLineTex, Texture2D ScreenQuadTex)
         {
             if (!Directory.Exists(FileDirectory))
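
The core of this change is the extra side test in the semi-automatic filter: besides matching the gradient direction and requiring the perpendicular-foot distance to the old edge to be small, a candidate segment is now rejected when either endpoint lies on the same side of the old edge as the quad's interior point (avgPoint), i.e. inside the manually identified quad. Below is a minimal standalone sketch of that test; UnityEngine.Vector2 and the helper names stand in for the project's own Vector/Line types and are not code from the commit.

    // Minimal sketch of the inner-segment rejection (illustrative only).
    using UnityEngine;

    static class InnerSegmentFilterSketch
    {
        // Signed cross product of (a->b) x (a->p): > 0 when p is left of ab, < 0 when right,
        // == 0 when p lies on the line (the same quantity LineCrossWithPoint returns).
        static float Cross(Vector2 a, Vector2 b, Vector2 p) =>
            (b.x - a.x) * (p.y - a.y) - (b.y - a.y) * (p.x - a.x);

        // True when both endpoints of the candidate segment are NOT on the interior side of
        // the old edge, where "interior" is the side containing the quad's average point.
        public static bool IsOutsideOldEdge(Vector2 oldA, Vector2 oldB, Vector2 avgPoint,
                                            Vector2 candA, Vector2 candB)
        {
            float interiorSign = Cross(oldA, oldB, avgPoint);
            return Cross(oldA, oldB, candA) * interiorSign <= 0
                && Cross(oldA, oldB, candB) * interiorSign <= 0;
        }
    }

Using <= rather than < also accepts endpoints that lie exactly on the old edge, matching the `<= 0` comparisons in the diff.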

+ 47 - 44
Assets/InfraredProject/WebCamera/Script/ZIM/Other/QuadrilateralInCamera.cs

@@ -38,7 +38,7 @@ namespace ZIM.Unity
         public QuadrilateralInCamera(IEnumerable<Vector2> enumable, Vector cameraSize)
         {
             CameraSize = cameraSize;
-            Quad = new OrdinalQuadrilateral(enumable.Select((i=>i.o0Vector())));
+            Quad = new OrdinalQuadrilateral(enumable.Select((i => i.o0Vector())));
         }
 
         public QuadrilateralInCamera(OrdinalQuadrilateral quad, Vector cameraSize)
@@ -92,50 +92,53 @@ namespace ZIM.Unity
         // Called when the camera resolution changes
         public void ReSize(Vector sizeNew, AspectRatioSetting viewAspectRatioSetting)
         {
-            float ratio;
-            Func<Vector, Vector> Translate;
-            List<Vector> vertices;
-            switch (viewAspectRatioSetting)
+            if (CameraSize != sizeNew)
             {
-                case AspectRatioSetting.FixedAll:
-                    var scale = new Vector(sizeNew.x / CameraSize.x, sizeNew.y / CameraSize.y);
-                    CameraSize = sizeNew;
-                    for (int i = 0; i < Quad.Count; i++)
-                        Quad[i] *= scale;
-
-                    break;
-                case AspectRatioSetting.FixedHeight:
-                    var wOld = CameraSize.x / CameraSize.y;                // width over height
-                    var wNew = sizeNew.x / sizeNew.y;
-                    ratio = wOld / wNew;
-                    Translate = (v) =>
-                    {
-                        var x = v.x - 0.5f;
-                        return new Vector(x * ratio + 0.5f, v.y);
-                    };
-                    vertices = ScreenVertexListNormalized;
-                    for (int i = 0; i < vertices.Count; i++)
-                        vertices[i] = Translate(vertices[i]);
-                    CameraSize = sizeNew;                           // Size must be set here first
-                    ScreenVertexListNormalized = vertices;
-
-                    break;
-                case AspectRatioSetting.FixedWidth:
-                    var hOld = CameraSize.y / CameraSize.x;                // height over width
-                    var hNew = sizeNew.y / sizeNew.x;
-                    ratio = hOld / hNew;
-                    Translate = (v) =>
-                    {
-                        var y = v.y - 0.5f;
-                        return new Vector(v.x, y * ratio + 0.5f);
-                    };
-                    vertices = ScreenVertexListNormalized;
-                    for (int i = 0; i < vertices.Count; i++)
-                        vertices[i] = Translate(vertices[i]);
-                    CameraSize = sizeNew;
-                    ScreenVertexListNormalized = vertices;
-
-                    break;
+                float ratio;
+                Func<Vector, Vector> Translate;
+                List<Vector> vertices;
+                switch (viewAspectRatioSetting)
+                {
+                    case AspectRatioSetting.FixedAll:
+                        var scale = new Vector(sizeNew.x / CameraSize.x, sizeNew.y / CameraSize.y);
+                        CameraSize = sizeNew;
+                        for (int i = 0; i < Quad.Count; i++)
+                            Quad[i] *= scale;
+
+                        break;
+                    case AspectRatioSetting.FixedHeight:
+                        var wOld = CameraSize.x / CameraSize.y;                // width over height
+                        var wNew = sizeNew.x / sizeNew.y;
+                        ratio = wOld / wNew;
+                        Translate = (v) =>
+                        {
+                            var x = v.x - 0.5f;
+                            return new Vector(x * ratio + 0.5f, v.y);
+                        };
+                        vertices = ScreenVertexListNormalized;
+                        for (int i = 0; i < vertices.Count; i++)
+                            vertices[i] = Translate(vertices[i]);
+                        CameraSize = sizeNew;                           // Size must be set here first
+                        ScreenVertexListNormalized = vertices;
+
+                        break;
+                    case AspectRatioSetting.FixedWidth:
+                        var hOld = CameraSize.y / CameraSize.x;                // height over width
+                        var hNew = sizeNew.y / sizeNew.x;
+                        ratio = hOld / hNew;
+                        Translate = (v) =>
+                        {
+                            var y = v.y - 0.5f;
+                            return new Vector(v.x, y * ratio + 0.5f);
+                        };
+                        vertices = ScreenVertexListNormalized;
+                        for (int i = 0; i < vertices.Count; i++)
+                            vertices[i] = Translate(vertices[i]);
+                        CameraSize = sizeNew;
+                        ScreenVertexListNormalized = vertices;
+
+                        break;
+                }
             }
         }
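
Functionally the ReSize body is unchanged; it is now only executed when the camera size actually differs. For the FixedHeight and FixedWidth cases the normalized vertices are rescaled about the 0.5 center by the ratio of the old to the new aspect ratio. A small sketch of that remapping, using UnityEngine.Vector2 purely for illustration (the project's own Vector type is not reproduced here):

    // Illustrative remapping for the FixedHeight case: keep the vertical coordinate and
    // squeeze/stretch the horizontal one about the 0.5 center by
    // ratio = (oldWidth / oldHeight) / (newWidth / newHeight).
    using UnityEngine;

    static class AspectRemapSketch
    {
        public static Vector2 FixedHeight(Vector2 v, float oldAspect, float newAspect)
        {
            float ratio = oldAspect / newAspect;
            return new Vector2((v.x - 0.5f) * ratio + 0.5f, v.y);
        }
    }
    // Example: x = 0.25 in a 4:3 frame maps to (0.25 - 0.5) * (4f/3f) / (16f/9f) + 0.5 = 0.3125
    // when the view becomes 16:9 with the height held fixed.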
 

+ 60 - 42
Assets/InfraredProject/WebCamera/Script/ZIM/ScreenLocate.cs

@@ -74,6 +74,12 @@ public partial class ScreenLocate : MonoBehaviour
     /// </summary>
     public bool SelectScreenAfterLocate(ScreenIdentificationTag tag) => ScreenIdentification.SelectScreenAfterLocate(tag);
 
+    /// <summary>
+    /// State of the last semi-automatic identification; the array is null before any identification has run.
+    /// Index into it to get a bool; false means that edge failed to identify (manual data was applied as a fallback). 0 - bottom, 1 - right, 2 - top, 3 - left
+    /// </summary>
+    public bool[] LastQuadSemiAutoState() => screenIdentification.LastQuadSemiAutoState;
+
     /// <summary>
     /// Gets the textures produced while the algorithm runs: 0 original image, 1 all segments found during semi-automatic identification, 2 screen black/white color difference, 3 identification result, 4 color difference overlaid with the identification result, 5 candidate segments for semi-automatic mode
     /// </summary>
@@ -97,8 +103,10 @@ public partial class ScreenLocate : MonoBehaviour
     private Texture mUVCTexture;
     public Texture getUVCTexture => mUVCTexture;
 
-    public Texture setUVCTexture { 
-        set {
+    public Texture setUVCTexture
+    {
+        set
+        {
             mUVCTexture = value;
         }
     }
@@ -190,7 +198,7 @@ public partial class ScreenLocate : MonoBehaviour
 
     [NonSerialized] public RectTransform BackQuad = null;
 
-    static public ScreenLocate Main;
+    static public ScreenLocate Main { get; private set; }
 
     static public void AutoLightPixels(Color[] pixels, int width, int height)
     {
@@ -280,6 +288,8 @@ public partial class ScreenLocate : MonoBehaviour
 
     void Awake()
     {
+        if (Main != null)
+            throw new Exception("[ScreenLocate] 不允许多个实例");
         Main = this;
 
 #if !UNITY_EDITOR_WIN
@@ -363,23 +373,34 @@ public partial class ScreenLocate : MonoBehaviour
     public void UVCUpdate(bool bChange)
     {
         mUVCTexture = mUVCCameraInfo.previewTexture;
-        Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:"+bChange);
+        Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:" + bChange);
         InfraredCameraHelper?.InvokeOnUVCIsUpdate();
 
         // Check here whether to enter automatic identification
-        if (bAutomaticRecognitionStart) {
+        if (bAutomaticRecognitionStart)
+        {
             bAutomaticRecognitionStart = false;
             Debug.Log("[ScreenLocate] UVCUpdate 开始自动识别 Capture:" + Capture + " ,Delay: " + Delay);
             screenIdentification.LocateScreen(Capture, Delay);
         }
 
-        if (bAutomaticRecognitionEnd) {
+        if (bAutomaticRecognitionEnd)
+        {
             bAutomaticRecognitionEnd = false;
             Debug.Log("[ScreenLocate] UVCUpdate 结束捕获,当前摄像机分辨率为: " + mUVCCameraInfo.Size);
             bAutomaticRecognition = false;
         }
     }
 
+    /// <summary>
+    /// Updates quadUnityVectorList after a mode has been selected
+    /// </summary>
+    public void UpdateQuadUnityVectorList()
+    {
+        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
+        SaveScreenLocateVectorList();
+    }
+
     int brightness = 0;
     /// <summary>
     /// Sets the brightness value of the algorithm's infrared light
@@ -394,14 +415,7 @@ public partial class ScreenLocate : MonoBehaviour
                 infraredLocate.SetBrightnessThreshold(value);     // The parameter is the infrared-light brightness threshold; the smaller the threshold, the lower the brightness that can be detected. Default is 0.93
         }
     }
-    /// <summary>
-    /// Updates quadUnityVectorList after a mode has been selected
-    /// </summary>
-    public void UpdateQuadUnityVectorList()
-    {
-        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
-        SaveScreenLocateVectorList();
-    }
+
     void Update()
     {
         //++frames;
@@ -438,7 +452,7 @@ public partial class ScreenLocate : MonoBehaviour
             //infraredLocate.SetBrightnessThreshold(redfilterValue);     // The parameter is the infrared-light brightness threshold; the smaller the threshold, the lower the brightness that can be detected. Default is 0.93
         }
 
-        if (RefreshCameraSize())       // Synchronize the resolution; after it changes, it must also be synchronized to InfraredDemo
+        if (RefreshQuadSize())       // Synchronize the resolution; after it changes, it must also be synchronized to InfraredDemo
         {
             quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
 
@@ -451,10 +465,11 @@ public partial class ScreenLocate : MonoBehaviour
                 Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变:[" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
                 Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变,刷新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
             }
-            else {
+            else
+            {
                 Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
             }
-         
+
             if (DebugOnZIMDemo)
                 Main.ShowScreen(screenIdentification.Screen.QuadInCamera);
 
@@ -698,15 +713,20 @@ public partial class ScreenLocate : MonoBehaviour
 
     }
 
-    private bool RefreshCameraSize()
+    private bool RefreshQuadSize()
     {
-        var sizeNew = getUVCCameraInfoSize.o0Vector();
-        screenIdentification.QuadAuto?.ReSize(sizeNew, ScreenMap.ViewAspectRatioSetting);
-        screenIdentification.QuadManual?.ReSize(sizeNew, ScreenMap.ViewAspectRatioSetting);
-        screenIdentification.QuadSemiAuto?.ReSize(sizeNew, ScreenMap.ViewAspectRatioSetting);
-        return screenIdentification.Screen.RefreshCameraSize(getUVCCameraInfoSize);
+        if (screenIdentification.Screen.RefreshCameraSize(getUVCCameraInfoSize))
+        {
+            var sizeNew = getUVCCameraInfoSize.o0Vector();
+            screenIdentification.QuadAuto?.ReSize(sizeNew, ScreenMap.ViewAspectRatioSetting);
+            screenIdentification.QuadManual?.ReSize(sizeNew, ScreenMap.ViewAspectRatioSetting);
+            screenIdentification.QuadSemiAuto?.ReSize(sizeNew, ScreenMap.ViewAspectRatioSetting);
+            return true;
+        }
+        return false;
     }
 
+
     Vector2 targetPos = Vector2.zero;
     Vector2 movePos = Vector2.zero;
     int moveSpeed = 20;
@@ -746,7 +766,7 @@ public partial class ScreenLocate : MonoBehaviour
     int Capture = 30;
     int Delay = 30;
     Vector2 EnterResolution;
-   // int DefaultResolutionIndex;
+    // int DefaultResolutionIndex;
 
     // readonly public int HighScreenLocateResolutionIndex = 2;       // During automatic identification the camera resolution is fixed at 1280 * 720 (index 2)
 
@@ -765,7 +785,7 @@ public partial class ScreenLocate : MonoBehaviour
         //screenIdentification.LocateScreen(Capture, Delay);
         OnLocateScreenEnter();
     }
-   // bool log1 = false, log2 = false;
+    // bool log1 = false, log2 = false;
     public void OnLocateScreenEnter()
     {
         bAutomaticRecognition = true;
@@ -779,12 +799,7 @@ public partial class ScreenLocate : MonoBehaviour
         Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // the highest resolution
         Resize((int)_HighResolution.x, (int)_HighResolution.y);
 
-        if (DebugOnZIMDemo)
-            screenIdentification.LocateScreen();
-
-#if  UNITY_EDITOR
-        UVCUpdate(false);
-#endif
+        screenIdentification.LocateScreen();
 
         //CreateUVCTexture2DIfNeeded();
         // log1 = true;
@@ -812,14 +827,14 @@ public partial class ScreenLocate : MonoBehaviour
 #endif
 
 #if UNITY_STANDALONE_WIN
-       // PC TODO: figure out how to handle this
-       // ResizePC(width, height);
+        // PC TODO: figure out how to handle this
+        // ResizePC(width, height);
 #endif
 
         //mUVCCameraInfo.SetSize(width, height);      // Manually records the resolution; this may be a problem because width and height are the requested resolution, not the camera's actual current resolution
-        Debug.Log($"[ScreenLocate] 开始修改分辨率 mUVCCameraInfo origin:[{ mUVCCameraInfo.CurrentWidth },{ mUVCCameraInfo.CurrentHeight }]=>target:[{ width },{ height }]");
+        Debug.Log($"[ScreenLocate] 开始修改分辨率 mUVCCameraInfo origin:[{mUVCCameraInfo.CurrentWidth},{mUVCCameraInfo.CurrentHeight}]=>target:[{width},{height}]");
 
-       // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
+        // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
     }
 
     /// <summary>
@@ -847,7 +862,7 @@ public partial class ScreenLocate : MonoBehaviour
         // Stop the current WebCamTexture
         _webCamTexture.Stop();
         // Trigger OnWebCamStopped event
-       // OnWebCamStopped?.Invoke();
+        // OnWebCamStopped?.Invoke();
         // Wait for a short time to ensure resources are released
         yield return new WaitForSeconds(0.5f);
         // Create a new WebCamTexture with the new dimensions
@@ -871,7 +886,7 @@ public partial class ScreenLocate : MonoBehaviour
         mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height);      // Manually records the resolution; this may be a problem because width and height are the requested resolution, not the camera's actual current resolution
         Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
 
-       // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
+        // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
     }
     #endregion
     public void BtnScreenMap()
@@ -1000,7 +1015,7 @@ public partial class ScreenLocate : MonoBehaviour
     static public bool GetScreenLocateVectorList()
     {
         string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
-        Debug.Log("GetScreenLocateVectorList:"+ posListStr);
+        Debug.Log("GetScreenLocateVectorList:" + posListStr);
         if (!string.IsNullOrWhiteSpace(posListStr))
         {
             quadUnityVectorList.Clear();
@@ -1018,7 +1033,7 @@ public partial class ScreenLocate : MonoBehaviour
 
     }
 
-    public Vector2 AdjustPointsOffset(Vector2 inputPoint,string type = "CameraLocation")
+    public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation")
     {
         // Compute the offset from the original center to the input point
         if (type == "CameraLocation")
@@ -1026,7 +1041,8 @@ public partial class ScreenLocate : MonoBehaviour
             CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
             return CameraLocationOffset;
         }
-        else {
+        else
+        {
             //ScreenUV
             UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
             return UVOffset;
@@ -1035,7 +1051,8 @@ public partial class ScreenLocate : MonoBehaviour
     /// <summary>
     /// Resets the offsets
     /// </summary>
-    public void ResetPointsOffest() {
+    public void ResetPointsOffest()
+    {
         CameraLocationOffset = Vector2.zero;
         UVOffset = Vector2.zero;
     }
@@ -1045,7 +1062,8 @@ public partial class ScreenLocate : MonoBehaviour
     /// </summary>
     /// <param name="cameraLocatoin"></param>
     /// <returns></returns>
-    public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin) {
+    public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin)
+    {
         return cameraLocatoin - CameraLocationOffset;
     }
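
The new LastQuadSemiAutoState() accessor exposes the per-edge result of the last semi-automatic pass (null before any pass has run). A hypothetical caller-side check might look like the sketch below; the method name and the edge labels are illustrative and not part of the commit.

    // Hypothetical consumer of the new accessor (illustrative only).
    void LogSemiAutoFallbacks()
    {
        bool[] state = ScreenLocate.Main.LastQuadSemiAutoState();
        if (state == null) return;                       // nothing identified yet

        string[] edgeNames = { "bottom", "right", "top", "left" };
        for (int i = 0; i < state.Length; i++)
            if (!state[i])
                UnityEngine.Debug.Log($"[Demo] Edge '{edgeNames[i]}' fell back to manual data");
    }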
 

+ 9 - 0
Assets/InfraredProject/WebCamera/Script/ZIM/ZIMUnity/ZIMMath.cs

@@ -28,5 +28,14 @@ namespace ZIM.Unity
 
         public static float LengthManhattan(this Vector2 v) => Math.Abs(v.x) + Math.Abs(v.y);
         public static double LengthManhattan(Vector2 v1, Vector2 v2) => (v1 - v2).LengthManhattan();
+
+        // Can be used to determine which side of line AB the point V lies on
+        // Return value: == 0 the point is on the line; > 0 triangle ABV is counter-clockwise; < 0 triangle ABV is clockwise (note: the Line constructor may reorder A and B)
+        public static float LineCrossWithPoint(this Line l, Vector v)
+        {
+            var ld = l.B - l.A;
+            var vd = v - l.A;
+            return ld.x * vd.y - ld.y * vd.x;
+        }
     }
 }
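
As a quick sanity check of the sign convention (values chosen purely for illustration, with the line taken as the x-axis):

    A = (0,0), B = (1,0), V = (0.5,  1)  =>  1 *  1 - 0 * 0.5 =  1   (> 0, ABV counter-clockwise)
    A = (0,0), B = (1,0), V = (0.5, -1)  =>  1 * -1 - 0 * 0.5 = -1   (< 0, ABV clockwise)
    A = (0,0), B = (1,0), V = (2,    0)  =>  1 *  0 - 0 * 2   =  0   (V lies on the line)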