// ScreenLocate.cs

#define ENABLE_LOG
using InfraredManager;
using o0;
using SLAMUVC;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.AI;
using UnityEngine.UI;
using ZIM;
using ZIM.Unity;
using static SLAMUVC.UVCManager;
using Color = UnityEngine.Color;
using Time = UnityEngine.Time;

[RequireComponent(typeof(Canvas))]
public partial class ScreenLocate : MonoBehaviour
{
    public InfraredCameraHelper InfraredCameraHelper;
    private const string TAG = "ScreenLocate#";

    public enum InfraredCount : int
    {
        Single = 1,
        Double = 2
    }

    enum Mode
    {
        InfraredLocate,
        ScreenMap,
        ScreenLocateManual
    }

    enum Platform
    {
        Window,
        Android
    }

    Platform mPlatform = Platform.Android;

    // Two infrared spots, ordered by spot size from largest to smallest; coordinates are obtained
    // from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
    public InfraredSpot[] InfraredSpots
    {
        get
        {
            infraredCount = InfraredCount.Double;
            return infraredSpotBuffer;
        }
    }

    // Single spot; coordinates are obtained from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
    public InfraredSpot InfraredSpotSingle
    {
        get
        {
            infraredCount = InfraredCount.Single;
            return infraredSpotBuffer[0];
        }
    }

    public InfraredSpot[] infraredSpotBuffer;

    public string GetInfraredCount() { return infraredCount.ToString(); }

    /// <summary>
    /// Offset applied to CameraLocation.
    /// </summary>
    public Vector2 CameraLocationOffset { get; set; } = new Vector2(0, 0);
    public Vector2 UVOffset { get; set; } = new Vector2(0, 0);
    // public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();

    #region UVC objects
    //public UVCManager mUVCManager;
    public CameraInfo mUVCCameraInfo;
    public bool getUVCCameraInfo => mUVCCameraInfo != null;
    public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
    private Texture mUVCTexture;
    public Texture getUVCTexture => mUVCTexture;
    public Texture setUVCTexture
    {
        set
        {
            mUVCTexture = value;
        }
    }
    private Texture2D mUVCTexture2D;
    // [SerializeField] Texture2DArray mUVCOutArray;
    #endregion

    public Text Info;
    public List<RectTransform> CrosshairInCamera;
    public List<RectTransform> CrosshairInScreen;
    public RectTransform ScreenQuad;
    public Toggle SaveToggle;
    public Toggle FullScreenToggle;
    public LineGenerator UILineGenerator;
    public bool ShowScreenQuad = false;

    // Output images.
    // Image 0 is the raw camera frame, image 1 shows all candidate segments for screen detection,
    // image 2 is the detected screen content, image 3 shows the four detected screen edges,
    // image 4 overlays images 2 and 3, and image 5 shows the segments found by the three algorithms
    // in different colors.
    public List<RawImage> outputRawImages;
    [NonSerialized] public Texture[] outputTexture2D = new Texture[8];
    public RawImage FullScreenImage;
    public PixelCheaker ScreenPixelCheaker;
    public InfraredSpotSettings InfraredSpotSettings;
    public o0.Geometry2D.Vector<int> CameraSize { get; set; }
    public List<Texture2D> DebugScreenImages = new List<Texture2D>();
    public bool DebugOnZIMDemo = false;
    // private SynchronizationContext mainContext;

    // Whether to track a single point; single-spot detection is the default.
    public bool bSinglePoint = true;
    // Distance ratio used for calibration when re-localizing; e.g. after a manual localization
    // followed by an automatic one, the manual result is used to calibrate.
    [NonSerialized] public float ReDoLocateCalibrationRatio = 0.08f;
    [NonSerialized] public InfraredCount infraredCount = InfraredCount.Single;
    bool bIdentifyRed = true;   // the device defaults to red
    bool bIdentifyGreen = true;

    #region Performance profiling
    public Text m_UITime;
    const float m_UIUpdateInterval = 0.1f;
    float m_UIUpdateTimer = 0.0f;
    List<float> m_History = new List<float>(100);
    int m_ValidHistoryFrames = 0;
    float m_AverageTime = float.NaN;
    float m_MedianTime = float.NaN;
    float m_MinTime = float.NaN;
    float m_MaxTime = float.NaN;
    public float updateInterval = 0.5F;
    private double lastInterval;
    private int frames = 0;
    private float fps;
    public Text m_FPS;
    #endregion

    #region PC-specific parameters
    // Brightness
    public float pcBrightness { get; set; } = 0.0f;
    // Contrast
    public float pcContrast { get; set; } = 0.0f;
    #endregion

    InfraredLocate infraredLocate;
    RectTransform canvas;
    Mode mode;
    List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
    //o0.Project.WebCam o0WebCam = null;
    o0.Project.ScreenIdentification screenIdentification;
    public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;

    /// <summary>
    /// Recognition state; set while automatic recognition is running.
    /// </summary>
    bool bAutomaticRecognition { get; set; } = false;      // capture in progress
    bool bAutomaticRecognitionStart { get; set; } = false; // whether to start capturing
    bool bAutomaticRecognitionEnd { get; set; } = false;   // whether to end capturing

    [NonSerialized] public RectTransform BackQuad = null;
    static public ScreenLocate Main;
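
    // Usage sketch (illustrative, not part of this file): another component can poll the
    // single-spot API each frame and map the normalized UV onto the screen, assuming a
    // hypothetical `cursor` RectTransform on an overlay canvas:
    //
    //   var uv = ScreenLocate.Main.InfraredSpotSingle.ScreenUV;
    //   if (uv.HasValue)
    //       cursor.position = new Vector2(uv.Value.x * Screen.width, uv.Value.y * Screen.height);
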
    static public void AutoLightPixels(Color[] pixels, int width, int height)
    {
        if (Main.DebugOnZIMDemo)
        {
            var newTex = pixels.zimAutoLightSimple(width, height);
            DebugTexture(7, newTex);
            try
            {
                Main.FullScreenImage.texture = newTex;
            }
            catch { }
        }
    }

    static public void DebugTexture(int index, Texture texture)
    {
        LateDestory(Main.outputTexture2D[index]);
        Main.outputTexture2D[index] = texture;
        try
        {
            Main.outputRawImages[index].texture = texture;
        }
        catch { }
    }

    static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));

    static public void SetScreen(UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }

    static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.anchorMin = rect.min;
        Main.BackQuad.anchorMax = rect.max;
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }

    static void DebugBackQuad(Rect? rect = null)
    {
        if (Main.BackQuad)
        {
            Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
            Main.BackQuad.GetComponent<RawImage>().color = Color.white;
            Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
            if (rect.HasValue)
            {
                Main.BackQuad.anchorMin = rect.Value.min;
                Main.BackQuad.anchorMax = rect.Value.max;
            }
        }
    }

    public void ReSizeTexture(int width, int height)
    {
        Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
        return; // The resize path below is currently disabled.
        if (mUVCTexture.width < width || mUVCTexture.height < height) // If the current resolution is too small, allocate a new texture.
        {
            Texture2D tex = new Texture2D(
                width, height,
                TextureFormat.ARGB32,
                false, /* mipmap */
                true /* linear */);
            tex.filterMode = FilterMode.Point;
            tex.Apply();
            mUVCTexture = tex;
            mUVCCameraInfo.previewTexture = tex;
            var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
        }
    }

    void Awake()
    {
        Main = this;
#if !UNITY_EDITOR_WIN
        DebugOnZIMDemo = false;
#endif
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction += UVCIsReady;
    }

    void OnDestroy()
    {
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction -= UVCIsReady;
    }

    void Start()
    {
        //mainContext = SynchronizationContext.Current;
        canvas = transform.GetComponent<RectTransform>();
        mode = Mode.InfraredLocate;
        if (DebugScreenImages.Count != 0 && DebugOnZIMDemo)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            screenIdentification.LocateScreen();
        }
        infraredCount = InfraredCount.Single;
        #region Performance profiling
        for (var i = 0; i < m_History.Capacity; ++i)
        {
            m_History.Add(0.0f);
        }
        lastInterval = Time.realtimeSinceStartup;
        frames = 0;
        #endregion
    }

    IEnumerator LateDestoryIEnum(UnityEngine.Object o)
    {
        if (o)
        {
            yield return new WaitForEndOfFrame();
            Destroy(o);
        }
    }

    // Used by the ZIMWebCamera scene.
    public void WebCamIsReady(Texture texture)
    {
        mPlatform = Platform.Window;
        mUVCTexture = texture;
        mUVCCameraInfo = new CameraInfo(mUVCTexture);
        brightness = 0;
        // UVC is ready.
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }

    /// <summary>
    /// Called when the UVCManager is created and initialized.
    /// </summary>
    /// <param name="cameraInfo"></param>
    public void UVCIsReady(CameraInfo cameraInfo)
    {
        mPlatform = Platform.Android;
        mUVCTexture = cameraInfo.previewTexture;
        mUVCCameraInfo = cameraInfo;
        Debug.Log("UVCIsReady:" + mUVCCameraInfo);
        // UVC is ready.
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }

    /// <summary>
    /// Fetches the new previewTexture.
    /// </summary>
    public void UVCUpdate(bool bChange)
    {
        mUVCTexture = mUVCCameraInfo.previewTexture;
        Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:" + bChange);
        InfraredCameraHelper?.InvokeOnUVCIsUpdate();
        // Decide here whether to enter automatic recognition.
        if (bAutomaticRecognitionStart)
        {
            bAutomaticRecognitionStart = false;
            Debug.Log("[ScreenLocate] UVCUpdate starting automatic recognition, Capture:" + Capture + " ,Delay: " + Delay);
            screenIdentification.LocateScreen(Capture, Delay);
        }
        if (bAutomaticRecognitionEnd)
        {
            bAutomaticRecognitionEnd = false;
            Debug.Log("[ScreenLocate] UVCUpdate capture finished, current camera resolution: " + mUVCCameraInfo.Size);
            bAutomaticRecognition = false;
        }
    }
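
    // Automatic recognition flow, as wired in this class:
    //   OnLocateScreenEnter()  -> records EnterResolution, switches to the calibration resolution,
    //                             sets bAutomaticRecognitionStart
    //   UVCUpdate(...)         -> consumes the start flag and calls screenIdentification.LocateScreen(Capture, Delay)
    //   OnLocateScreenEnd()    -> sets bAutomaticRecognitionEnd and restores EnterResolution
    //   UVCUpdate(...)         -> consumes the end flag and clears bAutomaticRecognition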

    int brightness = 0;

    /// <summary>
    /// Sets the brightness threshold used by the infrared-spot algorithm.
    /// </summary>
    /// <param name="value"></param>
    public void SetInfraredLocateBrightnessThreshold(float value)
    {
        if (infraredLocate != null)
        {
            if (value >= 0 && value <= 1)
                infraredLocate.SetBrightnessThreshold(value); // Brightness threshold for the infrared spot; a lower threshold detects dimmer spots. The default is 0.93.
        }
    }
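
    // Example (illustrative): a threshold of 0.8 accepts dimmer spots than the 0.93 default,
    //   ScreenLocate.Main.SetInfraredLocateBrightnessThreshold(0.8f);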

    void Update()
    {
        //++frames;
        //float timeNow = Time.realtimeSinceStartup;
        //if (timeNow > lastInterval + updateInterval)
        //{
        //    fps = (float)(frames / (timeNow - lastInterval));
        //    frames = 0;
        //    lastInterval = timeNow;
        //}
        //if (m_FPS != null)
        //    m_FPS.text = "FPS:" + fps.ToString("f2");
        if (mUVCCameraInfo == null) return;
        if (screenIdentification == null)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            // The PC build does not switch resolutions.
#if UNITY_ANDROID
            //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
            screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
#endif
        }
        if (infraredLocate == null)
        {
            infraredLocate = new InfraredLocate(mUVCCameraInfo, screenIdentification, InfraredSpotSettings, ScreenPixelCheaker);
            // InfraredDemo initialization
            //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
            //Debug.Log("Init Red filterValue:" + redfilterValue);
            //infraredLocate.SetBrightnessThreshold(redfilterValue); // Brightness threshold for the infrared spot; a lower threshold detects dimmer spots. The default is 0.93.
        }
        if (screenIdentification.Screen.RefreshCameraSize(getUVCCameraInfoSize)) // Sync the resolution; after a change it must also be propagated to InfraredDemo.
        {
            quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
            if (!ContainsNaN(quadUnityVectorList))
            {
                SaveScreenLocateVectorList();
                //SyncInfraredDemo();
                //SyncInfraredScreenPositioningView();
                InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
                Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed: [" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
                Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed, refreshed quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
            }
            else
            {
                Debug.LogError("[ScreenLocate] RefreshCameraSize screen size changed, NaN values present, recalibration required: " + PrintVector2List(quadUnityVectorList));
            }
            if (DebugOnZIMDemo)
                Main.ShowScreen(Main.ScreenQuad, screenIdentification.Screen.QuadInCamera);
        }
        //var t0 = Time.realtimeSinceStartup;
        /* New */
        //Debug.Log((mUVCCameraInfo != null) + " = " + mUVCCameraInfo.IsPreviewing + " = " + screenIdentification.Screen.Active);
        if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing) // Infrared detection only runs after the screen has been located.
        {
            //if (bAutomaticRecognition)
            //{
            //    // Resolution used during recognition.
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log1)
            //    {
            //        log1 = false;
            //        Debug.Log("[ScreenLocate] log1:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            //else
            //{
            //    // After automatic recognition, use the camera resolution getUVCCameraInfoSize.
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log2)
            //    {
            //        log2 = false;
            //        Debug.Log("[ScreenLocate] log2:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            // If a Bluetooth device is connected and it is not a 9-axis device, skip the recognition pass.
            if (BluetoothAim.ins?.status == BluetoothStatusEnum.ConnectSuccess && AimHandler.ins && AimHandler.ins.bRuning9Axis()) return;
            // Render at the getUVCCameraInfoSize resolution.
            CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            if (!screenIdentification.Update(mUVCTexture2D))
            {
                CameraSize = new o0.Geometry2D.Vector<int>(mUVCTexture2D.width, mUVCTexture2D.height);
                var pixels = mUVCTexture2D.GetPixels(); // Left to right, bottom to top.
                AutoLightPixels(pixels, CameraSize.x, CameraSize.y);
                //return;
                //InfraredSpots = infraredLocate.Update(pixels);
                if (bSinglePoint)
                    infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
                else
                    infraredSpotBuffer = infraredLocate.Update(pixels);
                if (mode == Mode.ScreenLocateManual)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Spot detected.
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                else if (mode == Mode.InfraredLocate)
                {
                    if (mPlatform == Platform.Window) // Render the UI crosshairs; they can be hidden once the game starts.
                    {
                        for (int i = 0; i < infraredSpotBuffer.Length; i++)
                        {
                            if (infraredSpotBuffer[i].CameraLocation != null)
                            {
                                // Spot detected.
                                var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
                                CrosshairInCamera[i].gameObject.SetActive(true);
                                CrosshairInCamera[i].anchoredPosition = posInCanvas;
                            }
                            else
                                CrosshairInCamera[i].gameObject.SetActive(false);
                        }
                    }
                    // Used on mobile: mPlatform == Platform.Android &&
                    // Common path for both mobile and PC.
                    if (infraredSpotBuffer.Length > 0)
                    {
                        int redIndex = 0;
                        int greenIndex = 1;
                        // Only the first spot is shown (if the largest spot leaves the frame it will flicker).
                        if (bSinglePoint)
                        {
                            redIndex = 0; // In single-spot mode the color can be switched.
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                string str = "Single:";
                                Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                            }
                        }
                        else
                        {
                            // In two-spot mode, pick the first spot.
                            if (bIdentifyRed && !bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else
                                {
                                    Info.text = "Largest red spot not detected!";
                                }
                            }
                            else if (!bIdentifyRed && bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "Green spot not detected!";
                                }
                            }
                            else
                            {
                                // With both colors selected, or neither, both spots are detected.
                                // Auto-switch: spot detected.
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "No spot detected!";
                                }
                            }
                        }
                    }
                }
                else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].ScreenUV != null)
                        {
                            // Spot detected.
                            var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                            CrosshairInScreen[i].gameObject.SetActive(true);
                            CrosshairInScreen[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInScreen[i].gameObject.SetActive(false);
                    }
                    if (Input.GetKeyDown(KeyCode.Escape))
                        ToMode(Mode.InfraredLocate);
                }
            }
        }
        //var t1 = Time.realtimeSinceStartup;
        //var dt = t1 - t0;
        //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
        //++m_ValidHistoryFrames;
        //m_UIUpdateTimer += Time.deltaTime;
        //if (m_UIUpdateTimer >= m_UIUpdateInterval)
        //{
        //    m_UIUpdateTimer = 0.0f;
        //    if (m_ValidHistoryFrames >= m_History.Count)
        //    {
        //        m_ValidHistoryFrames = 0;
        //        m_AverageTime = 0.0f;
        //        m_MinTime = float.PositiveInfinity;
        //        m_MaxTime = float.NegativeInfinity;
        //        {
        //            for (var i = 0; i < m_History.Count; i++)
        //            {
        //                var time = m_History[i];
        //                m_AverageTime += time;
        //                m_MinTime = Mathf.Min(m_MinTime, time);
        //                m_MaxTime = Mathf.Max(m_MaxTime, time);
        //            }
        //            m_AverageTime /= m_History.Count;
        //        }
        //        {
        //            m_History.Sort();
        //            // Odd-length history?
        //            if ((m_History.Count & 1) != 0)
        //            {
        //                m_MedianTime = m_History[m_History.Count / 2];
        //            }
        //            else
        //            {
        //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
        //            }
        //        }
        //    }
        //    var statistics = $"{m_History.Count} frame samples:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
        //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
        //    if (m_UITime != null)
        //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D ? ",T2D:" : "")}{(mUVCTexture2D ? mUVCTexture2D.width + "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height : "")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
        //}
        //UpdateInputs();
    }

    Vector2 targetPos = Vector2.zero;
    Vector2 movePos = Vector2.zero;
    int moveSpeed = 20;
    public float filterDis = 3.0f;

    void onFilterPos(Vector2 _vector2Pos)
    {
        // Mainly used when simulating the 9-axis device.
        // Apply an offset so the final crosshair output points at the exact center.
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
        if (Vector2.Distance(np, targetPos) >= filterDis)
        {
            targetPos = np;
            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(targetPos.x, targetPos.y, 0));
            //Vector2 np = new Vector2(uvCenterOffset.x * Screen.width, uvCenterOffset.y * Screen.height);
            //point -= np;
            InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos);
        }
        //movePos = Vector3.Lerp(movePos, targetPos, Time.deltaTime * moveSpeed);
        //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(movePos.x, movePos.y, 0));
    }
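
    // Example (illustrative): with filterDis = 3 on a 1920x1080 screen, a UV jitter of 0.001
    // along x (~1.9 px) is suppressed, while a move of 0.01 (~19 px) updates targetPos and
    // triggers InvokeOnPositionUpdate.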

    Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };

    void onFilterPos2(Vector2 _vector2Pos, int index)
    {
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height);
        if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
        {
            _targetPoints2[index] = np;
            InfraredCameraHelper.InvokeOnPositionUpdate2(_targetPoints2[index], index);
        }
    }

    #region Automatic recognition
    int Capture = 30;
    int Delay = 30;
    Vector2 EnterResolution;
    // int DefaultResolutionIndex;
    // readonly public int HighScreenLocateResolutionIndex = 2; // During automatic recognition the camera resolution is fixed at 1280 * 720 (index 2).

    public void BtnScreenLocate()
    {
        if (DebugScreenImages.Count != 0)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            CameraSize = new o0.Geometry2D.Vector<int>(DebugScreenImages[0].width, DebugScreenImages[0].height);
            WebCamIsReady(DebugScreenImages[0]);
            CreateUVCTexture2DIfNeeded();
        }
        //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
        //screenIdentification.LocateScreen(Capture, Delay);
        OnLocateScreenEnter();
    }

    // bool log1 = false, log2 = false;
    public void OnLocateScreenEnter()
    {
        bAutomaticRecognition = true;
        bAutomaticRecognitionStart = true;
        ResetScreenIdentification();
        //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // Record the resolution before entering (the game-scene resolution, lower than the one used for recognition).
        //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
        //Debug.Log("[ScreenLocate] Starting capture DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
        //InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
        EnterResolution = mUVCCameraInfo.Size; // Record the resolution before entering (the game-scene resolution, lower than the one used for recognition).
        Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // The highest resolution.
        Resize((int)_HighResolution.x, (int)_HighResolution.y);
        if (DebugOnZIMDemo)
            screenIdentification.LocateScreen();
        //CreateUVCTexture2DIfNeeded();
        //log1 = true;
        //log2 = true;
    }

    public void OnLocateScreenEnd()
    {
        bAutomaticRecognitionEnd = true;
        // Restore the resolution recorded for this screen-recognition pass (recognition currently runs at
        // a high resolution and switches back to the lower one when it finishes).
        //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
        Resize((int)EnterResolution.x, (int)EnterResolution.y);
    }

    /**
     * Changes the camera's actual resolution.
     */
    public void Resize(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID
        // Send the resolution-change command to the camera.
        mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
        // PC: TODO, decide how to handle this.
        // ResizePC(width, height);
#endif
        //mUVCCameraInfo.SetSize(width, height); // Records the resolution manually; this may be wrong because width and height are the requested resolution, not the camera's actual current resolution.
        Debug.Log($"[ScreenLocate] Changing resolution, mUVCCameraInfo origin:[{mUVCCameraInfo.CurrentWidth},{mUVCCameraInfo.CurrentHeight}]=>target:[{width},{height}]");
        //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }

    /// <summary>
    /// Changes the resolution on PC.
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    public void ResizePC(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
        //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
        //PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
        //if (pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
        //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
        mUVCCameraInfo.SetSize(width, height); // Records the resolution manually; this may be wrong because width and height are the requested resolution, not the camera's actual current resolution.
        Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
    }

    private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        // Stop the current WebCamTexture.
        _webCamTexture.Stop();
        // Trigger OnWebCamStopped event
        // OnWebCamStopped?.Invoke();
        // Wait for a short time to ensure resources are released.
        yield return new WaitForSeconds(0.5f);
        // Create a new WebCamTexture with the new dimensions.
        _webCamTexture = new WebCamTexture(newWidth, newHeight);
        pcWebCamera.webCamTexture = _webCamTexture;
        mUVCTexture = _webCamTexture;
        // Restart the camera.
        yield return StartCoroutine(StartWebCam(pcWebCamera));
    }

    private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        _webCamTexture.Play();
        // Wait until the WebCamTexture is playing.
        while (!_webCamTexture.isPlaying)
        {
            yield return null;
        }
        // Trigger OnWebCamStarted event
        //OnWebCamStarted?.Invoke();
        mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // Records the resolution manually; this may be wrong because width and height are the requested resolution, not the camera's actual current resolution.
        Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
        //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }
    #endregion

    public void BtnScreenMap()
    {
        ToMode(Mode.ScreenMap);
    }

    // Enters manual screen localization.
    public void BtnScreenLocateManual()
    {
        ToMode(Mode.ScreenLocateManual);
    }

    // Resets the screen-identification data.
    public void ResetScreenIdentification()
    {
        screenIdentification.Screen.Active = false;
    }

    /// <summary>
    /// Fixed vertex order: bottom-left, bottom-right, top-left, top-right.
    /// </summary>
    public static List<Vector2> quadUnityVectorList = new();
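
    // Example (illustrative): a slightly keystoned screen seen by the camera could produce
    //   [ (0.12, 0.10), (0.90, 0.08), (0.10, 0.88), (0.93, 0.91) ]
    // in that fixed bottom-left, bottom-right, top-left, top-right order.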

    /// <summary>
    /// Formats the list for logging.
    /// </summary>
    /// <param name="list">Bottom-left, bottom-right, top-left, top-right.</param>
    /// <returns></returns>
    public string PrintVector2List(List<Vector2> list)
    {
        if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
        string result = "";
        if (list.Count == 4)
        {
            result = "bottom-left" + list[0].ToString() + ", bottom-right" + list[1].ToString() + ", top-left" + list[2].ToString() + ", top-right" + list[3].ToString();
        }
        else
        {
            result = "count != 4 error";
        }
        //foreach (Vector2 vector in list)
        //{
        //    result += vector.ToString() + " ";
        //}
        //Debug.Log(result);
        return result;
    }

    /// <summary>
    /// Returns true if any component is NaN.
    /// </summary>
    /// <param name="vectors"></param>
    /// <returns></returns>
    public bool ContainsNaN(List<Vector2> vectors)
    {
        foreach (var v in vectors)
        {
            if (float.IsNaN(v.x) || float.IsNaN(v.y))
            {
                return true;
            }
        }
        return false;
    }

    // Marks the four corners of the screen; ScreenQuadObject has four child nodes used as markers.
    public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
    {
        if (screen == null)
        {
            Info.text = "Failed to detect the screen";
            return;
        }
        Info.text = "Screen detected";
        if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
        {
            ScreenQuadObject.gameObject.SetActive(true);
            for (int i = 0; i < 4; i++)
            {
                if (DebugOnZIMDemo)
                {
                    RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
                    t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
                }
            }
        }
        quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // Record the four corners.
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            if (DebugOnZIMDemo)
                SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] ShowScreen screen detected, updated quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
        }
        else
        {
            Debug.LogError("[ScreenLocate] ShowScreen NaN values present, recalibration required: " + PrintVector2List(quadUnityVectorList));
        }
    }

    public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);

    /// <summary>
    /// Saves the calibration points to local storage.
    /// </summary>
    static public void SaveScreenLocateVectorList()
    {
        string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
        Debug.Log("SaveScreenLocateVectorList: " + saveStr);
        PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
    }

    /// <summary>
    /// Loads the locally stored calibration points.
    /// </summary>
    static public bool GetScreenLocateVectorList()
    {
        string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
        Debug.Log("GetScreenLocateVectorList:" + posListStr);
        if (!string.IsNullOrWhiteSpace(posListStr))
        {
            quadUnityVectorList.Clear();
            quadUnityVectorList = posListStr.Split(';')
                .Select(s =>
                {
                    string[] parts = s.Split(',');
                    return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
                })
                .ToList();
            return true;
        }
        else return false;
    }
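
    // Example (illustrative) of the stored "x,y;x,y;x,y;x,y" value written by SaveScreenLocateVectorList:
    //   "0.12,0.10;0.90,0.08;0.10,0.88;0.93,0.91"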

    public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation")
    {
        // Compute the offset from the original center to the input point.
        if (type == "CameraLocation")
        {
            CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
            return CameraLocationOffset;
        }
        else
        {
            // ScreenUV
            UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
            return UVOffset;
        }
    }
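
    // Example (illustrative): AdjustPointsOffset(new Vector2(0.52f, 0.49f), "ScreenUV") sets
    // UVOffset to (0.02, -0.01), which onFilterPos then subtracts so the output re-centers on the screen.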

    /// <summary>
    /// Computes the offset-adjusted camera location.
    /// </summary>
    /// <param name="cameraLocation"></param>
    /// <returns></returns>
    public Vector2 GetOffsetCameraLocation(Vector2 cameraLocation)
    {
        return cameraLocation - CameraLocationOffset;
    }

    void ToMode(Mode mode)
    {
        if (this.mode == mode)
            return;
        if (mode == Mode.ScreenMap)
        {
            if (!screenIdentification.Screen.Active)
            {
                Info.text = "Locate the screen first";
                return;
            }
            Info.text = "Press ESC to exit";
            SetScreen(Color.black);
            //Info.transform.SetAsLastSibling();
            this.mode = Mode.ScreenMap;
        }
        else if (mode == Mode.InfraredLocate)
        {
            Info.text = screenIdentification.Screen.Active ? "Screen located" : "Failed to locate the screen";
            //Info.text = "Screen detected";
            SetScreen(null);
            foreach (var i in CrosshairInScreen)
                i.gameObject.SetActive(false);
            FullScreenImage.gameObject.SetActive(false);
            ScreenPixelCheaker.HideImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 4);
            this.mode = Mode.InfraredLocate;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} Mode.InfraredLocate: screen detected: {screenIdentification.Screen.Active}");
#endif
        }
        else if (mode == Mode.ScreenLocateManual)
        {
            Info.text = "Left-click the bottom-left corner of the screen";
            FullScreenImage.gameObject.SetActive(true);
            ScreenPixelCheaker.ShowImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 1);
            //var newTex = WebCamera.webCamTexture.AutoLight(10);
            //DebugTexture(1, TextureToTexture2D(rawImage.texture));
            CreateUVCTexture2DIfNeeded();
            DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
            //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
            //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
            this.mode = Mode.ScreenLocateManual;
        }
    }

    private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
    {
        if (width == 0)
            width = texture.width;
        if (height == 0)
            height = texture.height;
        Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture renderTexture = RenderTexture.GetTemporary(
            width,
            height,
            0,
            RenderTextureFormat.ARGB32,
            RenderTextureReadWrite.Linear);
        Graphics.Blit(texture, renderTexture);
        RenderTexture.active = renderTexture;
        _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        _texture2D.Apply();
        RenderTexture.active = currentRT;
        RenderTexture.ReleaseTemporary(renderTexture);
        return _texture2D;
    }
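
    // Note: TextureToTexture2D does a GPU blit into a temporary RenderTexture followed by a synchronous
    // ReadPixels back to the CPU, which is relatively expensive; in this file it is reached through
    // CreateUVCTexture2DIfNeeded(), typically once per processed camera frame in Update().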

    //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
    //{
    //    if (mUVCTexture2D != null)
    //        Destroy(mUVCTexture2D);
    //    mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    //}

    /// <summary>
    /// Uses the default mUVCTexture width and height.
    /// </summary>
    private void CreateUVCTexture2DIfNeeded()
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture);
    }

    /// <summary>
    /// Rebuilds mUVCTexture2D at the given width and height.
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    }

    #region DoubleButton
    private DateTime m_firstTime;
    private DateTime m_secondTime;

    private void Press()
    {
        Debug.Log("Entering manual localization");
        BtnScreenLocateManual();
        resetTime();
    }

    public void OnDoubleClick()
    {
        // Reset on timeout.
        if (!m_firstTime.Equals(default(DateTime)))
        {
            var intervalTime = DateTime.Now - m_firstTime;
            float milliSeconds = (float)intervalTime.TotalMilliseconds;
            if (milliSeconds >= 400)
                resetTime();
        }
        // Record the time of each of the two presses.
        if (m_firstTime.Equals(default(DateTime)))
            m_firstTime = DateTime.Now;
        else
            m_secondTime = DateTime.Now;
        // Triggered on the second click when the interval is under 400 ms.
        if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
        {
            var intervalTime = m_secondTime - m_firstTime;
            float milliSeconds = (float)intervalTime.TotalMilliseconds;
            if (milliSeconds < 400)
                Press();
            else
                resetTime();
        }
    }

    private void resetTime()
    {
        m_firstTime = default(DateTime);
        m_secondTime = default(DateTime);
    }
    #endregion

    #region Performance profiling
    void InvalidateTimings()
    {
        m_ValidHistoryFrames = 0;
        m_AverageTime = float.NaN;
        m_MedianTime = float.NaN;
        m_MinTime = float.NaN;
        m_MaxTime = float.NaN;
    }

    void UpdateInputs()
    {
        // Reset.
        if (Input.GetKeyDown(KeyCode.UpArrow))
        {
            InvalidateTimings();
        }
    }
    #endregion
}