// ScreenLocate.cs

#define ENABLE_LOG
using InfraredManager;
using o0;
using SLAMUVC;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.AI;
using UnityEngine.UI;
using ZIM;
using ZIM.Unity;
using static SLAMUVC.UVCManager;
using Color = UnityEngine.Color;
using Time = UnityEngine.Time;

[RequireComponent(typeof(Canvas))]
public partial class ScreenLocate : MonoBehaviour
{
    public InfraredCameraHelper InfraredCameraHelper;
    private const string TAG = "ScreenLocate#";

    public enum InfraredCount : int
    {
        Single = 1,
        Double = 2
    }

    enum Mode
    {
        InfraredLocate,
        ScreenMap,
        ScreenLocateManual
    }

    enum Platform
    {
        Window,
        Android
    }

    Platform mPlatform = Platform.Android;

    public enum ScreenIdentificationTag
    {
        // How the screen is located: manual, semi-automatic, or automatic
        Manual,
        SemiAuto,
        Auto
    }

    // Two spots, ordered by infrared spot size from largest to smallest; coordinates are read
    // from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
    public InfraredSpot[] InfraredSpots
    {
        get
        {
            infraredCount = InfraredCount.Double;
            return infraredSpotBuffer;
        }
    }

    // Single spot; coordinates are read from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
    public InfraredSpot InfraredSpotSingle
    {
        get
        {
            infraredCount = InfraredCount.Single;
            return infraredSpotBuffer[0];
        }
    }
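
    // Illustrative sketch (not part of the original source): a caller that polls the single-spot
    // property each frame might look like the following, assuming ScreenLocate.Main is alive and
    // that ScreenUV is null whenever no spot has been detected:
    //
    //     var spot = ScreenLocate.Main.InfraredSpotSingle;
    //     if (spot.ScreenUV != null)
    //         Debug.Log($"spot UV: {spot.ScreenUV.Value}, camera px: {spot.CameraLocation}");
    //
    // Note that reading InfraredSpots vs. InfraredSpotSingle also records the requested count
    // in infraredCount (Double vs. Single).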
    public InfraredSpot[] infraredSpotBuffer;

    public string GetInfraredCount() { return infraredCount.ToString(); }

    /// <summary>
    /// After locating there can be three results (manual, semi-auto, auto); select one of them as the final recognized screen.
    /// Returns false if the selected result is null (i.e. no screen was recognized), otherwise true.
    /// </summary>
    public bool SelectScreenAfterLocate(ScreenIdentificationTag tag) => ScreenIdentification.SelectScreenAfterLocate(tag);

    /// <summary>
    /// State of the last semi-automatic recognition; the array is null before any recognition has run.
    /// Index it for a boolean: false means that edge failed to be recognized (the manual data was applied as a fallback).
    /// Indices: 0 = bottom, 1 = right, 2 = top, 3 = left.
    /// </summary>
    public bool[] LastQuadSemiAutoState() => screenIdentification.LastQuadSemiAutoState;

    /// <summary>
    /// Textures output while the algorithm runs: 0 = original image, 1 = all segments found by semi-auto recognition,
    /// 2 = screen black/white difference, 3 = recognition result, 4 = screen difference overlaid with the result,
    /// 5 = candidate segments during semi-auto recognition.
    /// </summary>
    public Texture2D[] OutputTextures => outputTexture2D;

    /// <summary>
    /// Offset applied to CameraLocation
    /// </summary>
    public Vector2 CameraLocationOffset { get; set; } = new Vector2(0, 0);
    // Records the most recently applied value
    Vector2 OldCameraLocationOffset { get; set; } = new Vector2(0, 0);
    public Vector2 UVOffset { get; set; } = new Vector2(0, 0);
    // Records the most recently applied value
    Vector2 OldUVOffset { get; set; } = new Vector2(0, 0);

    // public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();

    #region UVC objects
    //public UVCManager mUVCManager;
    public CameraInfo mUVCCameraInfo;
    public bool getUVCCameraInfo => mUVCCameraInfo != null;
    public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
    private Texture mUVCTexture;
    public Texture getUVCTexture => mUVCTexture;
    public Texture setUVCTexture
    {
        set
        {
            mUVCTexture = value;
        }
    }
    private Texture2D mUVCTexture2D;
    // [SerializeField] Texture2DArray mUVCOutArray;
    #endregion

    public Text Info;
    public List<RectTransform> CrosshairInCamera;
    public List<RectTransform> CrosshairInScreen;
    public RectTransform ScreenQuad;
    public Toggle SaveToggle;
    public Toggle FullScreenToggle;
    public LineGenerator UILineGenerator;
    public bool ShowScreenQuad = false;
    // RawImages shown in the demo UI
    public List<RawImage> outputRawImages;
    readonly Texture2D[] outputTexture2D = new Texture2D[8];
    public RawImage FullScreenImage;
    public PixelCheaker ScreenPixelCheaker;
    public InfraredSpotSettings InfraredSpotSettings;
    // Global record of the CameraSize used by the algorithms; both infrared and screen recognition rely on it
    public o0.Geometry2D.Vector<int> CameraSize { get; set; }
    public List<Texture2D> DebugScreenImages = new List<Texture2D>();
    public bool DebugOnZIMDemo = false;
    // private SynchronizationContext mainContext;

    // Whether to display a single point
    public bool bSinglePoint = true; // single-point recognition by default
    [NonSerialized] public InfraredCount infraredCount = InfraredCount.Single; // number of infrared spots to recognize, 1 or 2
    public float ReDoLocateCalibrationRatio { get; private set; } // calibration distance ratio for semi-auto locating; calibrated against the manual result, segments too far from it are discarded
    bool bIdentifyRed = true; // the device defaults to red
    bool bIdentifyGreen = true;

    #region Performance measurement
    public Text m_UITime;
    const float m_UIUpdateInterval = 0.1f;
    float m_UIUpdateTimer = 0.0f;
    List<float> m_History = new List<float>(100);
    int m_ValidHistoryFrames = 0;
    float m_AverageTime = float.NaN;
    float m_MedianTime = float.NaN;
    float m_MinTime = float.NaN;
    float m_MaxTime = float.NaN;
    public float updateInterval = 0.5F;
    private double lastInterval;
    private int frames = 0;
    private float fps;
    public Text m_FPS;
    #endregion

    #region PC-specific parameters
    // Brightness
    public float pcBrightness { get; set; } = 0.0f;
    // Contrast
    public float pcContrast { get; set; } = 0.0f;
    #endregion

    // Infrared spot recognition algorithm
    InfraredLocate infraredLocate;
    // Screen recognition algorithm
    o0.Project.ScreenIdentification screenIdentification;
    public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;
    RectTransform canvas;
    Mode mode;
    //List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
    //o0.Project.WebCam o0WebCam = null;

    /// <summary>
    /// Recognition-in-progress state, recorded during automatic recognition
    /// </summary>
    bool bAutomaticRecognition { get; set; } = false; // true while capturing
    bool bAutomaticRecognitionStart { get; set; } = false; // whether capturing should start
    bool bAutomaticRecognitionEnd { get; set; } = false; // whether capturing has ended
    [NonSerialized] public RectTransform BackQuad = null;

    public static ScreenLocate Main { get; private set; }

    public static void AutoLightPixels(Color[] pixels, int width, int height)
    {
        if (Main.DebugOnZIMDemo)
        {
            var newTex = pixels.zimAutoLightSimple(width, height);
            DebugTexture(7, newTex);
            try
            {
                Main.FullScreenImage.texture = newTex;
            }
            catch { }
        }
    }

    public static void DebugTexture(int index, Texture2D texture)
    {
        LateDestory(Main.outputTexture2D[index]);
        Main.outputTexture2D[index] = texture;
        try
        {
            Main.outputRawImages[index].texture = texture;
        }
        catch { }
    }

    static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));

    public static void SetScreen(UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }

    public static void SetScreen(Rect rect, UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.anchorMin = rect.min;
        Main.BackQuad.anchorMax = rect.max;
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }
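
    // Illustrative note (not in the original source): SetScreen drives the full-screen backdrop quad
    // used while capturing. Passing null hides the backdrop, a color shows it; the Rect overload
    // additionally restricts the quad to a normalized sub-area of the canvas via anchorMin/anchorMax.
    // A hypothetical caller could do, for example:
    //
    //     ScreenLocate.SetScreen(new Rect(0.25f, 0.25f, 0.5f, 0.5f), Color.white); // lit center patch
    //     ScreenLocate.SetScreen();                                                // hide again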
    static void DebugBackQuad(Rect? rect = null)
    {
        if (Main.BackQuad)
        {
            Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
            Main.BackQuad.GetComponent<RawImage>().color = Color.white;
            Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
            if (rect.HasValue)
            {
                Main.BackQuad.anchorMin = rect.Value.min;
                Main.BackQuad.anchorMax = rect.Value.max;
            }
        }
    }

    //public void ReSizeTexture(int width, int height)
    //{
    //    Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
    //    if (mUVCTexture.width < width || mUVCTexture.height < height) // if the current resolution is too small, allocate a new texture
    //    {
    //        Texture2D tex = new Texture2D(
    //            width, height,
    //            TextureFormat.ARGB32,
    //            false, /* mipmap */
    //            true /* linear */);
    //        tex.filterMode = FilterMode.Point;
    //        tex.Apply();
    //        mUVCTexture = tex;
    //        mUVCCameraInfo.previewTexture = tex;
    //        var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
    //    }
    //}

    void Awake()
    {
        if (Main != null)
            throw new Exception("[ScreenLocate] Multiple instances are not allowed");
        Main = this;
#if !UNITY_EDITOR_WIN
        DebugOnZIMDemo = false;
#endif
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction += UVCIsReady;
    }

    void OnDestroy()
    {
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction -= UVCIsReady;
    }

    void Start()
    {
        //mainContext = SynchronizationContext.Current;
        canvas = transform.GetComponent<RectTransform>();
        mode = Mode.InfraredLocate;
        if (DebugScreenImages.Count != 0 && DebugOnZIMDemo)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            screenIdentification.LocateScreen();
        }
        infraredCount = InfraredCount.Single;
        ReDoLocateCalibrationRatio = 0.125f;
        #region Performance measurement
        for (var i = 0; i < m_History.Capacity; ++i)
        {
            m_History.Add(0.0f);
        }
        lastInterval = Time.realtimeSinceStartup;
        frames = 0;
        #endregion
    }

    IEnumerator LateDestoryIEnum(UnityEngine.Object o)
    {
        if (o)
        {
            yield return new WaitForEndOfFrame();
            Destroy(o);
        }
    }

    // Used by the ZIMWebCamera scene
    public void WebCamIsReady(Texture texture)
    {
        mPlatform = Platform.Window;
        mUVCTexture = texture;
        mUVCCameraInfo = new CameraInfo(mUVCTexture);
        brightness = 0;
        // UVC is ready
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }

    /// <summary>
    /// Called when the UVCManager is created and initialized
    /// </summary>
    /// <param name="cameraInfo"></param>
    public void UVCIsReady(CameraInfo cameraInfo)
    {
        mPlatform = Platform.Android;
        mUVCTexture = cameraInfo.previewTexture;
        mUVCCameraInfo = cameraInfo;
        Debug.Log("UVCIsReady:" + mUVCCameraInfo);
        // UVC is ready
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }

    /// <summary>
    /// Fetch the new previewTexture
    /// </summary>
    public void UVCUpdate(bool bChange)
    {
        mUVCTexture = mUVCCameraInfo.previewTexture;
        Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:" + bChange);
        InfraredCameraHelper?.InvokeOnUVCIsUpdate();
        // Check here whether automatic recognition should start
        if (bAutomaticRecognitionStart)
        {
            bAutomaticRecognitionStart = false;
            Debug.Log("[ScreenLocate] UVCUpdate starting automatic recognition Capture:" + Capture + " ,Delay: " + Delay);
            screenIdentification.LocateScreen(Capture, Delay);
        }
        if (bAutomaticRecognitionEnd)
        {
            bAutomaticRecognitionEnd = false;
            Debug.Log("[ScreenLocate] UVCUpdate capture finished, current camera resolution: " + mUVCCameraInfo.Size);
            bAutomaticRecognition = false;
        }
    }
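
    // Illustrative overview (not in the original source) of how the automatic-recognition flags in this
    // file interact:
    //   1. OnLocateScreenEnter() sets bAutomaticRecognition / bAutomaticRecognitionStart and switches the
    //      camera to its calibration resolution via Resize().
    //   2. Once the camera delivers frames again, UVCUpdate() sees bAutomaticRecognitionStart, clears it
    //      and calls screenIdentification.LocateScreen(Capture, Delay) to begin capturing.
    //   3. When the algorithm finishes, OnLocateScreenEnd() restores the previous resolution and sets
    //      bAutomaticRecognitionEnd, which UVCUpdate() clears on the next frame, ending the cycle.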
    /// <summary>
    /// Update quadUnityVectorList after a mode has been selected
    /// </summary>
    public void UpdateQuadUnityVectorList()
    {
        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
        SaveScreenLocateVectorList();
    }

    int brightness = 0;

    /// <summary>
    /// Set the brightness threshold used by the infrared algorithm
    /// </summary>
    /// <param name="value"></param>
    public void SetInfraredLocateBrightnessThreshold(float value)
    {
        if (infraredLocate != null)
        {
            if (value >= 0 && value <= 1)
                infraredLocate.SetBrightnessThreshold(value); // the infrared brightness threshold: the smaller it is, the dimmer a spot can be and still be detected; default is 0.93
        }
    }

    void Update()
    {
        //++frames;
        //float timeNow = Time.realtimeSinceStartup;
        //if (timeNow > lastInterval + updateInterval)
        //{
        //    fps = (float)(frames / (timeNow - lastInterval));
        //    frames = 0;
        //    lastInterval = timeNow;
        //}
        //if (m_FPS != null)
        //    m_FPS.text = "FPS:" + fps.ToString("f2");
        if (mUVCCameraInfo == null) return;
        if (screenIdentification == null)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            Debug.Log("[ScreenLocate] Initializing screen recognition");
            //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
            screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
        }
        if (infraredLocate == null)
        {
            infraredLocate = new InfraredLocate(mUVCCameraInfo, screenIdentification, InfraredSpotSettings, ScreenPixelCheaker);
            CameraSize = new o0.Geometry2D.Vector<int>((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            InfraredSpot.RefreshMinVerifyLength(new o0.Geometry2D.Float.Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y));
            Debug.Log($"[ScreenLocate] Initializing infrared recognition, current camera resolution: {CameraSize.x}×{CameraSize.y}, infrared tracking distance: {InfraredSpot.MinVerifyLength}");
            // InfraredDemo initialization
            //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
            //Debug.Log("Init Red filterValue:" + redfilterValue);
            //infraredLocate.SetBrightnessThreshold(redfilterValue); // the infrared brightness threshold: the smaller it is, the dimmer a spot can be detected; default is 0.93
        }
        /* New */
        //Debug.Log((mUVCCameraInfo != null) + " = " + mUVCCameraInfo.IsPreviewing + " = " + screenIdentification.Screen.Active);
        if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing)
        {
            //if (bAutomaticRecognition)
            //{
            //    // resolution used during recognition
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log1)
            //    {
            //        log1 = false;
            //        Debug.Log("[ScreenLocate] log1:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            //else
            //{
            //    // after automatic recognition finishes, use the camera resolution getUVCCameraInfoSize
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log2)
            //    {
            //        log2 = false;
            //        Debug.Log("[ScreenLocate] log2:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            // If a Bluetooth device is connected and it is not a 9-axis device, skip the recognition processing
            if (BluetoothAim.ins?.status == BluetoothStatusEnum.ConnectSuccess && AimHandler.ins && AimHandler.ins.bRuning9Axis()) return;
            // Render at the getUVCCameraInfoSize resolution
            CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            if (!screenIdentification.Update(mUVCTexture2D))
            {
                // Sync the resolution; after it changes it also has to be propagated to InfraredDemo
                if (RefreshCameraSize())
                {
                    if (screenIdentification.Screen.QuadInCamera != null)
                    {
                        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
                        if (!ContainsNaN(quadUnityVectorList))
                        {
                            SaveScreenLocateVectorList();
                            //SyncInfraredDemo();
                            //SyncInfraredScreenPositioningView();
                            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
                            Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed: [" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
                            Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed, refreshing quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
                        }
                        else
                        {
                            Debug.LogError("[ScreenLocate] RefreshCameraSize screen size changed, NaN values present, recalibration required: " + PrintVector2List(quadUnityVectorList));
                        }
                    }
                    if (DebugOnZIMDemo)
                        Main.ShowScreen(screenIdentification.Screen.QuadInCamera);
                }
                if (CameraSize.x != mUVCTexture2D.width || CameraSize.y != mUVCTexture2D.height)
                {
                    Debug.Log($"<color=red>[ScreenLocate] Resolution mismatch, camera resolution: {getUVCCameraInfoSize}, mUVCTexture2D size: {mUVCTexture2D.width}×{mUVCTexture2D.height}</color>");
                    return;
                }
                // Fetch the pixels for the steps below
                var pixels = mUVCTexture2D.GetPixels(); // left to right, bottom to top
                AutoLightPixels(pixels, CameraSize.x, CameraSize.y);
                if (bSinglePoint)
                    infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
                else
                    infraredSpotBuffer = infraredLocate.Update(pixels);
                if (mode == Mode.ScreenLocateManual)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Spot detected
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                else if (mode == Mode.InfraredLocate)
                {
                    if (mPlatform == Platform.Window) // render the points on the UI; they can be hidden once in game
                    {
                        for (int i = 0; i < infraredSpotBuffer.Length; i++)
                        {
                            if (infraredSpotBuffer[i].CameraLocation != null)
                            {
                                // Spot detected
                                var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
                                CrosshairInCamera[i].gameObject.SetActive(true);
                                CrosshairInCamera[i].anchoredPosition = posInCanvas;
                            }
                            else
                                CrosshairInCamera[i].gameObject.SetActive(false);
                        }
                    }
                    // On mobile use mPlatform == Platform.Android &&
                    // Common path for both mobile and PC
                    if (infraredSpotBuffer.Length > 0)
                    {
                        int redIndex = 0;
                        int greenIndex = 1;
                        // Show only the first point (it flickers if the largest point goes out of bounds)
                        if (bSinglePoint)
                        {
                            redIndex = 0; // in single-point recognition the color can be switched
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                string str = "Single:";
                                Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                            }
                        }
                        else
                        {
                            // In dual-point mode select the first point
                            if (bIdentifyRed && !bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else
                                {
                                    Info.text = "Red max point not detected!";
                                }
                            }
                            else if (!bIdentifyRed && bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "Green point not detected!";
                                }
                            }
                            else
                            {
                                // With neither or both colors selected, recognize both points
                                // Auto-switch: spot detected
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "No point detected!";
                                }
                            }
                        }
                    }
                }
                else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].ScreenUV != null)
                        {
                            // Spot detected
                            var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                            CrosshairInScreen[i].gameObject.SetActive(true);
                            CrosshairInScreen[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInScreen[i].gameObject.SetActive(false);
                    }
                    if (Input.GetKeyDown(KeyCode.Escape))
                        ToMode(Mode.InfraredLocate);
                }
            }
        }
        //var t1 = Time.realtimeSinceStartup;
        //var dt = t1 - t0;
        //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
        //++m_ValidHistoryFrames;
        //m_UIUpdateTimer += Time.deltaTime;
        //if (m_UIUpdateTimer >= m_UIUpdateInterval)
        //{
        //    m_UIUpdateTimer = 0.0f;
        //    if (m_ValidHistoryFrames >= m_History.Count)
        //    {
        //        m_ValidHistoryFrames = 0;
        //        m_AverageTime = 0.0f;
        //        m_MinTime = float.PositiveInfinity;
        //        m_MaxTime = float.NegativeInfinity;
        //        {
        //            for (var i = 0; i < m_History.Count; i++)
        //            {
        //                var time = m_History[i];
        //                m_AverageTime += time;
        //                m_MinTime = Mathf.Min(m_MinTime, time);
        //                m_MaxTime = Mathf.Max(m_MaxTime, time);
        //            }
        //            m_AverageTime /= m_History.Count;
        //        }
        //        {
        //            m_History.Sort();
        //            // Odd-length history?
        //            if ((m_History.Count & 1) != 0)
        //            {
        //                m_MedianTime = m_History[m_History.Count / 2];
        //            }
        //            else
        //            {
        //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
        //            }
        //        }
        //    }
        //    var statistics = $"{m_History.Count} frame samples:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
        //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
        //    if (m_UITime != null)
        //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D ? ",T2D:" : "")}{(mUVCTexture2D ? mUVCTexture2D.width + "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height : "")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
        //}
        //UpdateInputs();
        if (DebugOnZIMDemo)
        {
            if (Input.GetKeyDown(KeyCode.Z))
                SelectScreenAfterLocate(ScreenIdentificationTag.Manual);
            if (Input.GetKeyDown(KeyCode.X))
                SelectScreenAfterLocate(ScreenIdentificationTag.SemiAuto);
            if (Input.GetKeyDown(KeyCode.C))
                SelectScreenAfterLocate(ScreenIdentificationTag.Auto);
        }
    }
    private bool RefreshCameraSize()
    {
        var sizeNew = new o0.Geometry2D.Vector<int>((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        var sizeNewFloat = getUVCCameraInfoSize.o0Vector();
        if (sizeNew != CameraSize || (screenIdentification?.Screen?.QuadInCamera != null && sizeNewFloat != screenIdentification.Screen.QuadInCamera.CameraSize))
        {
            Debug.Log($"<color=aqua>[ScreenLocate] Resolution changed, refreshing (from {CameraSize.x}×{CameraSize.y} to {sizeNew.x}×{sizeNew.y}), has screen data: {screenIdentification.Screen.QuadInCamera != null}, has manual data: {screenIdentification.QuadManual != null}</color>");
            // Sync the camera resolution
            CameraSize = sizeNew;
            screenIdentification.Screen.RefreshCameraSize(sizeNewFloat);
            screenIdentification.QuadAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            screenIdentification.QuadManual?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            screenIdentification.QuadSemiAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            InfraredSpot.RefreshMinVerifyLength(sizeNewFloat);
            return true;
        }
        return false;
    }

    Vector2 targetPos = Vector2.zero;
    Vector2 movePos = Vector2.zero;
    int moveSpeed = 20;
    public float filterDis = 3.0f;

    void onFilterPos(Vector2 _vector2Pos)
    {
        // Mainly used when simulating the 9-axis mode
        // Apply an offset so that the final crosshair output points at the exact center
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
        if (Vector2.Distance(np, targetPos) >= filterDis)
        {
            targetPos = np;
            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(targetPos.x, targetPos.y, 0));
            //Vector2 np = new Vector2(uvCenterOffset.x * Screen.width, uvCenterOffset.y * Screen.height);
            //point -= np;
            InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos);
        }
        //movePos = Vector3.Lerp(movePos, targetPos, Time.deltaTime * moveSpeed);
        //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(movePos.x, movePos.y, 0));
    }

    Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };

    void onFilterPos2(Vector2 _vector2Pos, int index)
    {
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height);
        if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
        {
            _targetPoints2[index] = np;
            InfraredCameraHelper.InvokeOnPositionUpdate2(_targetPoints2[index], index);
        }
    }
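
    // Illustrative note (not in the original source): onFilterPos/onFilterPos2 convert the normalized
    // ScreenUV (minus UVOffset) into screen pixels and act as a simple dead-band jitter filter: the
    // published position only moves when the new sample is at least filterDis pixels away from the last
    // published one. With the default filterDis = 3, a UV change of 1/Screen.width (one pixel on a
    // 1920-wide display) is ignored, while a 5-pixel jump passes through immediately.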
    #region Automatic recognition
    int Capture = 30;
    int Delay = 30;
    Vector2 EnterResolution;
    // int DefaultResolutionIndex;
    // readonly public int HighScreenLocateResolutionIndex = 2; // during automatic recognition the camera resolution is fixed at 1280 * 720 (index 2)

    public void BtnScreenLocate()
    {
        if (DebugScreenImages.Count != 0)
        {
            //screenIdentification = new o0.Project.ScreenIdentification();
            CameraSize = new o0.Geometry2D.Vector<int>(DebugScreenImages[0].width, DebugScreenImages[0].height);
            WebCamIsReady(DebugScreenImages[0]);
            CreateUVCTexture2DIfNeeded();
        }
        //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
        //screenIdentification.LocateScreen(Capture, Delay);
        OnLocateScreenEnter();
    }

    // bool log1 = false, log2 = false;
    public void OnLocateScreenEnter()
    {
        bAutomaticRecognition = true;
        bAutomaticRecognitionStart = true;
        ResetScreenIdentification();
        //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // record the resolution before entering (the game-scene resolution, lower than during recognition)
        //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
        // Debug.Log("[ScreenLocate] Start capture DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
        // InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
        EnterResolution = mUVCCameraInfo.Size; // record the resolution before entering (the game-scene resolution, lower than during recognition)
        Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // the highest resolution
        Resize((int)_HighResolution.x, (int)_HighResolution.y);
        //CreateUVCTexture2DIfNeeded();
        // log1 = true;
        // log2 = true;
        screenIdentification.LocateScreen(); // entry point for automatic recognition
        if (DebugOnZIMDemo)
        {
            var webCam = GetComponent<ZIMWebCamera>();
            webCam.AdjustResolution(1920, 1080);
            mUVCCameraInfo.SetSize(webCam.webCamTexture.width, webCam.webCamTexture.height);
        }
    }

    public void OnLocateScreenEnd()
    {
        bAutomaticRecognitionEnd = true;
        // Recognition ran at a high resolution; switch back to the lower resolution recorded on entry
        //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
        Resize((int)EnterResolution.x, (int)EnterResolution.y);
        if (DebugOnZIMDemo)
        {
            var webCam = GetComponent<ZIMWebCamera>();
            webCam.AdjustResolution((int)EnterResolution.x, (int)EnterResolution.y);
            mUVCCameraInfo.SetSize(webCam.webCamTexture.width, webCam.webCamTexture.height);
        }
    }

    /**
     * Change the camera's actual resolution
     */
    public void Resize(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID
        // Send the command that changes the camera's actual resolution
        mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
        // PC TODO: figure out how to handle this
        // ResizePC(width, height);
#endif
        //mUVCCameraInfo.SetSize(width, height); // record the resolution manually; possibly problematic because width/height are the desired resolution, not the camera's actual one
        Debug.Log($"[ScreenLocate] Changing resolution mUVCCameraInfo origin:[{mUVCCameraInfo.CurrentWidth},{mUVCCameraInfo.CurrentHeight}]=>target:[{width},{height}]");
        // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }

    /// <summary>
    /// Change the resolution on PC
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    public void ResizePC(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
        //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
        // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
        // if (pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
        //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
        mUVCCameraInfo.SetSize(width, height); // record the resolution manually; possibly problematic because width/height are the desired resolution, not the camera's actual one
        Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
    }

    private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        // Stop the current WebCamTexture
        _webCamTexture.Stop();
        // Trigger OnWebCamStopped event
        // OnWebCamStopped?.Invoke();
        // Wait for a short time to ensure resources are released
        yield return new WaitForSeconds(0.5f);
        // Create a new WebCamTexture with the new dimensions
        _webCamTexture = new WebCamTexture(newWidth, newHeight);
        pcWebCamera.webCamTexture = _webCamTexture;
        mUVCTexture = _webCamTexture;
        // Restart the camera
        yield return StartCoroutine(StartWebCam(pcWebCamera));
    }

    private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        _webCamTexture.Play();
        // Wait until the WebCamTexture is playing
        while (!_webCamTexture.isPlaying)
        {
            yield return null;
        }
        // Trigger OnWebCamStarted event
        //OnWebCamStarted?.Invoke();
        mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // record the resolution manually; possibly problematic because width/height are the desired resolution, not the camera's actual one
        Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
        // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }
    #endregion

    public void BtnScreenMap()
    {
        ToMode(Mode.ScreenMap);
    }

    // Enter manual screen locating
    public void BtnScreenLocateManual()
    {
        ToMode(Mode.ScreenLocateManual);
    }

    // Reset the screen recognition data
    public void ResetScreenIdentification()
    {
        screenIdentification.Screen.Active = false;
    }

    // threshold is in [0, 1]; 0 means nearest, 1 means farthest
    public void SetReDoLocateCalibrationRatio(float threshold)
    {
        const float MIN = 0.005f;
        const float MAX = 0.305f;
        ReDoLocateCalibrationRatio = MIN + (MAX - MIN) * threshold;
    }
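
    // Illustrative note (not in the original source): SetReDoLocateCalibrationRatio maps the 0-1 slider
    // value linearly onto [0.005, 0.305]. For example, threshold = 0.5 yields
    // 0.005 + (0.305 - 0.005) * 0.5 = 0.155; per the field comment, semi-auto segments farther than this
    // ratio from the manual result are discarded.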
    /// <summary>
    /// Fixed vertex order: bottom-left, bottom-right, top-left, top-right
    /// </summary>
    public static List<Vector2> quadUnityVectorList = new();

    /// <summary>
    /// Print the list
    /// </summary>
    /// <param name="list">bottom-left, bottom-right, top-left, top-right</param>
    /// <returns></returns>
    public string PrintVector2List(List<Vector2> list)
    {
        if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
        string result = "";
        if (list.Count == 4)
        {
            result = "BL" + list[0].ToString() + ", BR" + list[1].ToString() + ", TL" + list[2].ToString() + ", TR" + list[3].ToString();
        }
        else
        {
            result = "count != 4 error";
        }
        //foreach (Vector2 vector in list)
        //{
        //    result += vector.ToString() + " ";
        //}
        //Debug.Log(result);
        return result;
    }

    /// <summary>
    /// Check whether any component is NaN
    /// </summary>
    /// <param name="vectors"></param>
    /// <returns></returns>
    public bool ContainsNaN(List<Vector2> vectors)
    {
        foreach (var v in vectors)
        {
            if (float.IsNaN(v.x) || float.IsNaN(v.y))
            {
                return true;
            }
        }
        return false;
    }

    // Mark the four corners of the screen; ScreenQuadObject has 4 child nodes used as markers
    public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
    {
        if (screen == null)
        {
            Info.text = "Screen recognition failed";
            return;
        }
        Info.text = "Screen recognized";
        //if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
        //{
        //    ScreenQuadObject.gameObject.SetActive(true);
        //    for (int i = 0; i < 4; i++)
        //    {
        //        if (DebugOnZIMDemo)
        //        {
        //            RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
        //            t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
        //        }
        //    }
        //}
        quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // record the four points
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            if (DebugOnZIMDemo)
                SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] ShowScreen screen recognized, updating quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
        }
        else
        {
            Debug.LogError("[ScreenLocate] ShowScreen NaN values present, recalibration required: " + PrintVector2List(quadUnityVectorList));
        }
    }

    public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);

    /// <summary>
    /// Save the calibration point positions locally
    /// </summary>
    public static void SaveScreenLocateVectorList()
    {
        //string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
        // If the list is empty, save an empty string (or a custom marker)
        string saveStr = quadUnityVectorList.Count > 0 ? string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")) : "";
        Debug.Log("SaveScreenLocateVectorList: " + saveStr);
        PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
    }

    /// <summary>
    /// Load the locally stored calibration point positions
    /// </summary>
    public static bool GetScreenLocateVectorList()
    {
        string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
        Debug.Log("GetScreenLocateVectorList:" + posListStr);
        if (!string.IsNullOrWhiteSpace(posListStr))
        {
            quadUnityVectorList.Clear();
            quadUnityVectorList = posListStr.Split(';')
                .Select(s =>
                {
                    string[] parts = s.Split(',');
                    return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
                })
                .ToList();
            return true;
        }
        else return false;
    }
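
    // Illustrative note (not in the original source): the PlayerPrefs entry "ScreenLocateVectorList"
    // holds the four normalized corners as "x,y" pairs joined with ';', in the fixed order
    // bottom-left;bottom-right;top-left;top-right, e.g. "0.1,0.1;0.9,0.1;0.1,0.9;0.9,0.9".
    // Note that float.Parse and string interpolation here use the current culture; on locales with ','
    // as the decimal separator the round trip could fail, so an invariant-culture format may be a safer
    // choice if that ever becomes an issue.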
    public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation")
    {
        // Compute the offset from the original center to the input point
        if (type == "CameraLocation")
        {
            OldCameraLocationOffset = CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
            return CameraLocationOffset;
        }
        else
        {
            // ScreenUV
            OldUVOffset = UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
            return UVOffset;
        }
    }

    /// <summary>
    /// Reset the offsets
    /// </summary>
    public void ResetPointsOffest()
    {
        CameraLocationOffset = Vector2.zero;
        UVOffset = Vector2.zero;
    }

    /// <summary>
    /// Undo: restore the last recorded offsets
    /// </summary>
    public void RevokePointsOffest()
    {
        CameraLocationOffset = OldCameraLocationOffset;
        UVOffset = OldUVOffset;
    }

    /// <summary>
    /// Compute the offset-adjusted cameraLocatoin position
    /// </summary>
    /// <param name="cameraLocatoin"></param>
    /// <returns></returns>
    public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin)
    {
        return cameraLocatoin - CameraLocationOffset;
    }

    void ToMode(Mode mode)
    {
        if (this.mode == mode)
            return;
        if (mode == Mode.ScreenMap)
        {
            if (!screenIdentification.Screen.Active)
            {
                Info.text = "Locate the screen first";
                return;
            }
            Info.text = "Press ESC to exit";
            SetScreen(Color.black);
            //Info.transform.SetAsLastSibling();
            this.mode = Mode.ScreenMap;
        }
        else if (mode == Mode.InfraredLocate)
        {
            Info.text = screenIdentification.Screen.Active ? "Screen located" : "Screen locating failed";
            //Info.text = "Screen recognized";
            SetScreen(null);
            foreach (var i in CrosshairInScreen)
                i.gameObject.SetActive(false);
            FullScreenImage.gameObject.SetActive(false);
            ScreenPixelCheaker.HideImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 4);
            this.mode = Mode.InfraredLocate;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} Mode.InfraredLocate: screen recognized: {screenIdentification.Screen.Active}");
#endif
        }
        else if (mode == Mode.ScreenLocateManual)
        {
            Info.text = "Left-click the bottom-left corner of the screen";
            FullScreenImage.gameObject.SetActive(true);
            ScreenPixelCheaker.ShowImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 1);
            // var newTex = WebCamera.webCamTexture.AutoLight(10);
            //DebugTexture(1, TextureToTexture2D(rawImage.texture));
            CreateUVCTexture2DIfNeeded();
            DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
            //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
            //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
            this.mode = Mode.ScreenLocateManual;
        }
    }

    private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
    {
        if (width == 0)
            width = texture.width;
        if (height == 0)
            height = texture.height;
        Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture renderTexture = RenderTexture.GetTemporary(
            width,
            height,
            0,
            RenderTextureFormat.ARGB32,
            RenderTextureReadWrite.Linear);
        Graphics.Blit(texture, renderTexture);
        RenderTexture.active = renderTexture;
        _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        _texture2D.Apply();
        RenderTexture.active = currentRT;
        RenderTexture.ReleaseTemporary(renderTexture);
        return _texture2D;
    }
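
    // Illustrative note (not in the original source): TextureToTexture2D copies an arbitrary Texture
    // (here the GPU-side UVC/WebCam preview) into a readable Texture2D by blitting it into a temporary
    // RenderTexture and calling ReadPixels, the usual way to get CPU access to a non-readable texture.
    // CreateUVCTexture2DIfNeeded runs this every frame while previewing, so each frame pays for a
    // GPU->CPU readback plus a fresh Texture2D allocation (the old one is destroyed); if this shows up
    // in profiling, reusing one persistent Texture2D of the current camera size would be a possible
    // optimization.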
    //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
    //{
    //    if (mUVCTexture2D != null)
    //        Destroy(mUVCTexture2D);
    //    mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    //}

    /// <summary>
    /// Use the default mUVCTexture width and height
    /// </summary>
    private void CreateUVCTexture2DIfNeeded()
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture);
    }

    /// <summary>
    /// Rebuild mUVCTexture2D at the given width and height
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    }

    #region DoubleButton
    private DateTime m_firstTime;
    private DateTime m_secondTime;

    private void Press()
    {
        Debug.Log("Entering manual locating");
        BtnScreenLocateManual();
        resetTime();
    }

    public void OnDoubleClick()
    {
        // Reset on timeout
        if (!m_firstTime.Equals(default(DateTime)))
        {
            var intervalTime = DateTime.Now - m_firstTime;
            double milliSeconds = intervalTime.TotalMilliseconds; // TotalMilliseconds so intervals longer than a minute don't wrap
            if (milliSeconds >= 400)
                resetTime();
        }
        // Record the times of the two button presses
        if (m_firstTime.Equals(default(DateTime)))
            m_firstTime = DateTime.Now;
        else
            m_secondTime = DateTime.Now;
        // Triggered on the second click, when the interval is under 400 ms
        if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
        {
            var intervalTime = m_secondTime - m_firstTime;
            double milliSeconds = intervalTime.TotalMilliseconds;
            if (milliSeconds < 400)
                Press();
            else
                resetTime();
        }
    }
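
    // Illustrative note (not in the original source): OnDoubleClick is a manual double-click detector for
    // UI buttons that lack one. The first press stamps m_firstTime, the second stamps m_secondTime, and
    // only when the two presses are less than 400 ms apart does Press() fire and switch the component into
    // manual screen-locating mode; anything slower resets the timer pair.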
    private void resetTime()
    {
        m_firstTime = default(DateTime);
        m_secondTime = default(DateTime);
    }
    #endregion

    #region Performance measurement
    void InvalidateTimings()
    {
        m_ValidHistoryFrames = 0;
        m_AverageTime = float.NaN;
        m_MedianTime = float.NaN;
        m_MinTime = float.NaN;
        m_MaxTime = float.NaN;
    }

    void UpdateInputs()
    {
        // Reset
        if (Input.GetKeyDown(KeyCode.UpArrow))
        {
            InvalidateTimings();
        }
    }
    #endregion
}