ScreenLocate.cs 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158
  1. #define ENABLE_LOG
  2. using InfraredManager;
  3. using o0;
  4. using SLAMUVC;
  5. using System;
  6. using System.Collections.Generic;
  7. using System.Linq;
  8. using UnityEngine;
  9. using UnityEngine.UI;
  10. using ZIM;
  11. using ZIM.Unity;
  12. using static SLAMUVC.UVCManager;
  13. using Color = UnityEngine.Color;
  14. [RequireComponent(typeof(Canvas))]
  15. public partial class ScreenLocate : MonoBehaviour
  16. {
// Helper that relays camera-ready / position events to external listeners.
public InfraredCameraHelper InfraredCameraHelper;
private const string TAG = "ScreenLocate#";
// Operating mode of this component.
enum Mode
{
    InfraredLocate,     // track infrared spots against the located screen
    ScreenMap,          // map detected spots directly onto the on-screen canvas (editor debug)
    ScreenLocateManual  // manual 4-point screen calibration (flow currently disabled)
}
// How many infrared spots are being tracked.
enum InfraredCount
{
    Single,
    Double
}
// Platform the camera feed comes from.
enum Platform
{
    Window,
    Android
}
Platform mPlatform = Platform.Android;
// Two spots, ordered by infrared-spot size from large to small; coordinates are read
// via InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
// NOTE(review): this getter has a side effect — it switches tracking to Double mode.
public InfraredSpot[] InfraredSpots
{
    get
    {
        infraredCount = InfraredCount.Double;
        return infraredSpotBuffer;
    }
}
// Single spot; coordinates are read via InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
// NOTE(review): this getter has a side effect — it switches tracking to Single mode.
public InfraredSpot InfraredSpotSingle
{
    get
    {
        infraredCount = InfraredCount.Single;
        return infraredSpotBuffer[0];
    }
}
// Buffer refreshed each frame by the infrared locator in Update().
public InfraredSpot[] infraredSpotBuffer;
InfraredCount infraredCount;
public string GetInfraredCount() { return infraredCount.ToString(); }
/// <summary>
/// Offset applied to CameraLocation.
/// </summary>
public Vector2 CameraLocationOffset { get; set; } = new Vector2(0, 0);
// Offset subtracted from ScreenUV before converting to screen pixels (see onFilterPos).
public Vector2 UVOffset { get; set; } = new Vector2(0, 0);
// public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();
#region UVC 处理的对象
//public UVCManager mUVCManager;
public CameraInfo mUVCCameraInfo;
// True once a CameraInfo has been provided by UVC/WebCam startup.
public bool getUVCCameraInfo => mUVCCameraInfo != null ? true : false;
// Current camera resolution; falls back to 320x240 before the camera is ready.
public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
private Texture mUVCTexture;
public Texture getUVCTexture => mUVCTexture;
public Texture setUVCTexture {
    set {
        mUVCTexture = value;
    }
}
private Texture2D mUVCTexture2D;
// [SerializeField] Texture2DArray mUVCOutArray;
#endregion
public Text Info;
public List<RectTransform> CrosshairInCamera;
public List<RectTransform> CrosshairInScreen;
public RectTransform ScreenQuad;
public Toggle SaveToggle;
public Vector2 ScreenLocateCameraSize; // target resolution for screen recognition; adjusted whenever the camera resolution changes
public bool ShowScreenQuad = false;
public RawImage rawImage;
public RawImage rawImage1;
public RawImage rawImage2;
public RawImage rawImage3;
public RawImage rawImage4;
public RawImage rawImage5;
public RawImage FullScreenImage;
public InfraredSpotSettings InfraredSpotSettings;
//public ZIMWebCamera zimWebCamera => GetComponent<ZIMWebCamera>();
public Texture2D DebugScreenImage;
public bool DebugOnEditorWin = false;
// private SynchronizationContext mainContext;
// Whether to display a single point.
public bool bSinglePoint = true; // single-point detection by default
bool bIdentifyRed = true; // device defaults to red
bool bIdentifyGreen = true;
#region 性能检测相关
public Text m_UITime;
const float m_UIUpdateInterval = 0.1f;
float m_UIUpdateTimer = 0.0f;
List<float> m_History = new List<float>(100);
int m_ValidHistoryFrames = 0;
float m_AverageTime = float.NaN;
float m_MedianTime = float.NaN;
float m_MinTime = float.NaN;
float m_MaxTime = float.NaN;
public float updateInterval = 0.5F;
private double lastInterval;
private int frames = 0;
private float fps;
public Text m_FPS;
#endregion
#region PC部分参数
// Brightness
public float pcBrightness { get; set; } = 0.0f;
// Contrast
public float pcContrast { get; set; } = 0.0f;
#endregion
InfraredLocate infraredLocate;
RectTransform canvas;
Mode mode;
List<Vector2> pointManual = new List<Vector2>();
//o0.Project.WebCam o0WebCam = null;
o0.Project.ScreenIdentification screenIdentification;
public o0.Project.ScreenIdentification getScreenIdentification => screenIdentification;
/// <summary>
/// State flag set while recognition is running; recorded during automatic recognition.
/// </summary>
bool bAutomaticRecognition { get; set; } = false; // capture in progress
bool bAutomaticRecognitionStart { get; set; } = false; // whether capture should start
bool bAutomaticRecognitionEnd { get; set; } = false; // whether capture has finished
static public ScreenLocate Main;
static public List<RawImage> DebugImage = new List<RawImage>();
static public RectTransform BackQuad = null;
// Replaces the texture shown by the debug RawImage at `index`,
// destroying the previously displayed texture so it is not leaked.
static public void DebugTexture(int index, Texture texture)
{
    Destroy(DebugImage[index].texture);
    DebugImage[index].texture = texture;
}
  144. static public void SetScreen(UnityEngine.Color? color = null)
  145. {
  146. if (BackQuad == null)
  147. {
  148. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  149. var background = canvas.Find("Background");
  150. BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  151. }
  152. BackQuad.parent.gameObject.SetActive(color != null);
  153. BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  154. //Debug.Log("Set Screen " + color.GetColorName());
  155. }
  156. static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
  157. {
  158. if (BackQuad == null)
  159. {
  160. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  161. var background = canvas.Find("Background");
  162. BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  163. }
  164. BackQuad.parent.gameObject.SetActive(color != null);
  165. BackQuad.anchorMin = rect.min;
  166. BackQuad.anchorMax = rect.max;
  167. BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  168. //Debug.Log("Set Screen " + color.GetColorName());
  169. }
  170. static void DebugBackQuad(Rect? rect = null)
  171. {
  172. if (BackQuad)
  173. {
  174. BackQuad.parent.GetComponent<RawImage>().enabled = false;
  175. BackQuad.GetComponent<RawImage>().color = Color.white;
  176. BackQuad.parent.gameObject.SetActive(!BackQuad.parent.gameObject.activeSelf);
  177. if (rect.HasValue)
  178. {
  179. BackQuad.anchorMin = rect.Value.min;
  180. BackQuad.anchorMax = rect.Value.max;
  181. }
  182. }
  183. }
  184. public void ReSizeTexture(int width, int height)
  185. {
  186. Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
  187. return;
  188. if (mUVCTexture.width < width || mUVCTexture.height < height) // 如果当前分辨率太小,则重新new一个texture
  189. {
  190. Texture2D tex = new Texture2D(
  191. width, height,
  192. TextureFormat.ARGB32,
  193. false, /* mipmap */
  194. true /* linear */);
  195. tex.filterMode = FilterMode.Point;
  196. tex.Apply();
  197. mUVCTexture = tex;
  198. mUVCCameraInfo.previewTexture = tex;
  199. var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
  200. }
  201. }
void Awake()
{
    // Expose this instance as a singleton for the static helpers (DebugTexture, SetScreen).
    Main = this;
#if !UNITY_EDITOR_WIN
    // The editor-only debug path is only meaningful inside the Windows editor.
    DebugOnEditorWin = false;
#endif
    //if (mUVCDrawer)
    //    mUVCDrawer.StartPreviewAction += UVCIsReady;
}
void OnDestroy()
{
    // Unsubscription mirror of the (disabled) subscription in Awake.
    //if (mUVCDrawer)
    //    mUVCDrawer.StartPreviewAction -= UVCIsReady;
}
  216. void Start()
  217. {
  218. //mainContext = SynchronizationContext.Current;
  219. DebugImage.Add(rawImage);
  220. DebugImage.Add(rawImage1);
  221. DebugImage.Add(rawImage2);
  222. DebugImage.Add(rawImage3);
  223. DebugImage.Add(rawImage4);
  224. DebugImage.Add(rawImage5);
  225. DebugImage.Add(FullScreenImage);
  226. canvas = transform.GetComponent<RectTransform>();
  227. mode = Mode.InfraredLocate;
  228. if (DebugScreenImage && DebugOnEditorWin)
  229. {
  230. screenIdentification = new o0.Project.ScreenIdentification();
  231. screenIdentification.LocateScreen();
  232. }
  233. infraredCount = InfraredCount.Single;
  234. #region 性能检测相关
  235. for (var i = 0; i < m_History.Capacity; ++i)
  236. {
  237. m_History.Add(0.0f);
  238. }
  239. lastInterval = Time.realtimeSinceStartup;
  240. frames = 0;
  241. #endregion
  242. }
// Used by the ZIMWebCamera scene.
// Called when the PC webcam texture is ready; switches the platform to Windows
// and wraps the texture in a CameraInfo.
public void WebCamIsReady(Texture texture)
{
    mPlatform = Platform.Window;
    mUVCTexture = texture;
    mUVCCameraInfo = new CameraInfo(mUVCTexture);
    brightness = 0;
    // UVC is ready.
    InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
}
/// <summary>
/// Called when UVCManager is created/initialized; switches the platform to Android
/// and adopts the UVC preview texture.
/// </summary>
/// <param name="cameraInfo">Camera info supplied by the UVC layer.</param>
public void UVCIsReady(CameraInfo cameraInfo)
{
    mPlatform = Platform.Android;
    mUVCTexture = cameraInfo.previewTexture;
    mUVCCameraInfo = cameraInfo;
    Debug.Log("UVCIsReady:" + mUVCCameraInfo);
    // UVC is ready.
    InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
}
/// <summary>
/// Fetches the new previewTexture after the UVC layer updated it, and drives the
/// automatic-recognition start/end handshake set up by OnLocateScreenEnter/End.
/// </summary>
/// <param name="bChange">Whether the UVC layer reported a change (logged only).</param>
public void UVCUpdate(bool bChange)
{
    mUVCTexture = mUVCCameraInfo.previewTexture;
    Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:"+bChange);
    InfraredCameraHelper?.InvokeOnUVCIsUpdate();
    // Decide here whether to enter automatic recognition.
    if (bAutomaticRecognitionStart) {
        bAutomaticRecognitionStart = false;
        Debug.Log("[ScreenLocate] UVCUpdate 开始自动识别 Capture:" + Capture + " ,Delay: " + Delay);
        screenIdentification.LocateScreen(Capture, Delay);
    }
    if (bAutomaticRecognitionEnd) {
        bAutomaticRecognitionEnd = false;
        Debug.Log("[ScreenLocate] UVCUpdate 结束捕获,当前摄像机分辨率为: " + mUVCCameraInfo.Size);
        // Capture finished: record the camera resolution used for screen location.
        ScreenLocateCameraSize = mUVCCameraInfo.Size;
        bAutomaticRecognition = false;
    }
}
// Brightness value for the infrared algorithm; reset to 0 on webcam init.
int brightness = 0;
  288. /// <summary>
  289. /// 设置算法红外灯的亮度值
  290. /// </summary>
  291. /// <param name="value"></param>
  292. public void SetInfraredLocateBrightnessThreshold(float value)
  293. {
  294. if (infraredLocate != null)
  295. {
  296. if (value >= 0 && value <= 1)
  297. infraredLocate.SetBrightnessThreshold(value); // 参数是 红外灯的亮度阈值,阈值越小能够检测到的亮度就越低,默认值是0.93
  298. }
  299. }
// Per-frame pipeline: lazy-init screen identification + infrared locator, sync the
// camera resolution, then (when previewing and the screen is located) detect infrared
// spots and publish filtered positions. Order of the steps is significant.
void Update()
{
    //++frames;
    //float timeNow = Time.realtimeSinceStartup;
    //if (timeNow > lastInterval + updateInterval)
    //{
    //    fps = (float)(frames / (timeNow - lastInterval));
    //    frames = 0;
    //    lastInterval = timeNow;
    //}
    //if (m_FPS != null)
    //    m_FPS.text = "FPS:" + fps.ToString("f2");
    // Nothing to do until a camera has been attached.
    if (mUVCCameraInfo == null) return;
    // Lazily create the screen-identification state on the first camera frame.
    if (screenIdentification == null)
    {
        screenIdentification = new o0.Project.ScreenIdentification();
        // PC does not switch resolution anymore.
#if UNITY_ANDROID
        //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
        screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
#endif
    }
    // Lazily create the infrared locator once camera + screen identification exist.
    if (infraredLocate == null)
    {
        infraredLocate = new InfraredLocate(mUVCCameraInfo, screenIdentification, InfraredSpotSettings);
        // InfraredDemo initialization
        //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
        //Debug.Log("Init Red filterValue:" + redfilterValue);
        //infraredLocate.SetBrightnessThreshold(redfilterValue); // infrared brightness threshold; lower values detect dimmer spots, default 0.93
    }
    if (screenIdentification.Screen.RefreshCameraSize(getUVCCameraInfoSize)) // sync resolution; after a change it must also be synced to InfraredDemo
    {
        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexList();
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            //SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变:[" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变,刷新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
        }
        else {
            Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
        }
        if (DebugOnEditorWin)
            Main.ShowScreen(Main.ScreenQuad, screenIdentification.Screen.QuadInCamera);
    }
    // Manual calibration mode: the click-driven flow below is currently disabled,
    // so this branch only skips the detection pipeline.
    if (mode == Mode.ScreenLocateManual)
    {
        //if (Input.GetMouseButtonDown(0))
        //{
        //    var mouse = Input.mousePosition;
        //    var u = mouse.x / Screen.width;
        //    var v = mouse.y / Screen.height;
        //    u = Math.Clamp(u, 0, 1);
        //    v = Math.Clamp(v, 0, 1);
        //    pointManual.Add(new Vector2(u * mUVCTexture.width, v * mUVCTexture.height));
        //    var obj = Instantiate(Resources.Load("Point")) as GameObject;
        //    obj.transform.SetParent(FullScreenImage.transform);
        //    obj.transform.localPosition = new Vector2(u, v).pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), FullScreenImage.rectTransform.rect);
        //    if (pointManual.Count == 1)
        //        Info.text = "左键单击屏幕 右下角";
        //    else if (pointManual.Count == 2)
        //        Info.text = "左键单击屏幕 右上角";
        //    else if (pointManual.Count == 3)
        //        Info.text = "左键单击屏幕 左上角";
        //    else if (pointManual.Count == 4)
        //    {
        //        screenIdentification.LocateScreenManual(new OrdinalQuadrilateral(pointManual[0].o0Vector(), pointManual[1].o0Vector(), pointManual[3].o0Vector(), pointManual[2].o0Vector()));
        //        pointManual.Clear();
        //        ShowScreen(screenIdentification.Screen.Quad);
        //        foreach (Transform i in FullScreenImage.transform)
        //            Destroy(i.gameObject);
        //        ToMode(Mode.InfraredLocate);
        //    }
        //}
        return;
    }
    //var t0 = Time.realtimeSinceStartup;
    /* New*/
    //Debug.Log((mUVCCameraInfo != null) +" = "+ mUVCCameraInfo.IsPreviewing + " = "+ screenIdentification.Screen.Active);
    if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing) // run infrared detection only after the screen has been located
    {
        //if (bAutomaticRecognition)
        //{
        //    // resolution used during the recognition process
        //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        //    if (log1)
        //    {
        //        log1 = false;
        //        Debug.Log("[ScreenLocate] log1:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
        //    }
        //}
        //else
        //{
        //    // after automatic recognition use the camera resolution getUVCCameraInfoSize
        //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        //    if (log2)
        //    {
        //        log2 = false;
        //        Debug.Log("[ScreenLocate] log2:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
        //    }
        //}
        // If a Bluetooth device is connected and it is not a 9-axis device, skip the recognition algorithm.
        //if (BluetoothAim.ins?.status == BluetoothStatusEnum.ConnectSuccess && AimHandler.ins && AimHandler.ins.bRuning9Axis()) return;
        // Render using the getUVCCameraInfoSize resolution.
        CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        if (!screenIdentification.Update(mUVCTexture2D))
        {
            if (!screenIdentification.Screen.Active)
            {
                //DebugTexture(1, mUVCTexture2D.zimAutoLightSimple());
                return;
            }
            //if (mUVCCameraInfo.Size != ScreenLocateCameraSize) // executed when the camera resolution changes
            //{
            //    RefreshScreenQuad(mUVCCameraInfo.Size);
            //    return;
            //}
            if (mode == Mode.InfraredLocate)
            {
                //0,0, cameraTexture2D.width, cameraTexture2D.height,0
                var pixels = mUVCTexture2D.GetPixels(); // left-to-right, bottom-to-top
                //InfraredSpots = infraredLocate.Update(pixels);
                if (bSinglePoint)
                    infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
                else
                    infraredSpotBuffer = infraredLocate.Update(pixels);
                if (mPlatform == Platform.Window) // render the points on the UI; can be hidden once in game
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Spot detected.
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(mUVCCameraInfo.Size, rawImage.rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                // Used on mobile: mPlatform == Platform.Android &&
                // Common path for mobile and PC.
                if (infraredSpotBuffer.Length > 0)
                {
                    int redIndex = 0;
                    int greenIndex = 1;
                    // Only the first point is shown (flickers if the largest point goes out of bounds).
                    if (bSinglePoint)
                    {
                        redIndex = 0; // in single-point mode the color can be switched
                        if (infraredSpotBuffer[redIndex].ScreenUV != null)
                        {
                            string str = "Single:";
                            Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                            onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                        }
                    }
                    else
                    {
                        // In two-point mode pick the first point.
                        if (bIdentifyRed && !bIdentifyGreen)
                        {
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                            }
                            else
                            {
                                Info.text = "未检测到红色最大点!";
                            }
                        }
                        else if (!bIdentifyRed && bIdentifyGreen)
                        {
                            if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                            {
                                Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                            }
                            else
                            {
                                Info.text = "未检测到绿色点!";
                            }
                        }
                        else
                        {
                            // Both unselected or both selected: detect both points.
                            // Auto-switch to whichever spot is detected.
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                            }
                            else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                            {
                                Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                            }
                            else
                            {
                                Info.text = "未检测到点!";
                            }
                        }
                    }
                }
            }
            else if (mode == Mode.ScreenMap && DebugOnEditorWin)
            {
                var pixels = mUVCTexture2D.GetPixels();
                if (infraredCount == InfraredCount.Single)
                    infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
                else if (infraredCount == InfraredCount.Double)
                    infraredSpotBuffer = infraredLocate.Update(pixels);
                for (int i = 0; i < infraredSpotBuffer.Length; i++)
                {
                    if (infraredSpotBuffer[i].ScreenUV != null)
                    {
                        // Spot detected.
                        var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                        CrosshairInScreen[i].gameObject.SetActive(true);
                        CrosshairInScreen[i].anchoredPosition = posInCanvas;
                    }
                    else
                        CrosshairInScreen[i].gameObject.SetActive(false);
                }
                if (Input.GetKeyDown(KeyCode.Escape))
                    ToMode(Mode.InfraredLocate);
            }
        }
    }
    //var t1 = Time.realtimeSinceStartup;
    //var dt = t1 - t0;
    //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
    //++m_ValidHistoryFrames;
    //m_UIUpdateTimer += Time.deltaTime;
    //if (m_UIUpdateTimer >= m_UIUpdateInterval)
    //{
    //    m_UIUpdateTimer = 0.0f;
    //    if (m_ValidHistoryFrames >= m_History.Count)
    //    {
    //        m_ValidHistoryFrames = 0;
    //        m_AverageTime = 0.0f;
    //        m_MinTime = float.PositiveInfinity;
    //        m_MaxTime = float.NegativeInfinity;
    //        {
    //            for (var i = 0; i < m_History.Count; i++)
    //            {
    //                var time = m_History[i];
    //                m_AverageTime += time;
    //                m_MinTime = Mathf.Min(m_MinTime, time);
    //                m_MaxTime = Mathf.Max(m_MaxTime, time);
    //            }
    //            m_AverageTime /= m_History.Count;
    //        }
    //        {
    //            m_History.Sort();
    //            // Odd-length history?
    //            if ((m_History.Count & 1) != 0)
    //            {
    //                m_MedianTime = m_History[m_History.Count / 2];
    //            }
    //            else
    //            {
    //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
    //            }
    //        }
    //    }
    //    var statistics = $"{m_History.Count} 帧样本:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
    //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
    //    if (m_UITime != null)
    //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D? ",T2D:" : "")}{(mUVCTexture2D? mUVCTexture2D.width+ "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height:"")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
    //}
    //UpdateInputs();
}
// Last accepted single-point position (screen pixels).
Vector2 targetPos = Vector2.zero;
Vector2 movePos = Vector2.zero;
int moveSpeed = 20;
// Minimum pixel movement before a new position is published (jitter filter).
public float filterDis = 3.0f;
// Filters a single-point UV position and forwards it when it moved at least
// filterDis pixels since the last accepted position.
void onFilterPos(Vector2 _vector2Pos)
{
    // Mainly used when simulating the 9-axis mode.
    // Apply an offset so the final crosshair output points at the exact center.
    Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
    if (Vector2.Distance(np, targetPos) >= filterDis)
    {
        targetPos = np;
        //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(targetPos.x, targetPos.y, 0));
        //Vector2 np = new Vector2(uvCenterOffset.x * Screen.width, uvCenterOffset.y * Screen.height);
        //point -= np;
        InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos);
    }
    //movePos = Vector3.Lerp(movePos, targetPos, Time.deltaTime * moveSpeed);
    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(movePos.x, movePos.y, 0));
}
  603. Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
  604. void onFilterPos2(Vector2 _vector2Pos, int index)
  605. {
  606. Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height);
  607. if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
  608. {
  609. _targetPoints2[index] = np;
  610. InfraredCameraHelper.InvokeOnPositionUpdate2(_targetPoints2[index], index);
  611. }
  612. }
#region 自动识别
// Number of capture frames and the delay passed to LocateScreen.
int Capture = 30;
int Delay = 30;
// Camera resolution recorded on entering auto-recognition, restored on exit.
Vector2 EnterResolution;
// int DefaultResolutionIndex;
// readonly public int HighScreenLocateResolutionIndex = 2; // during auto recognition the camera resolution is fixed to 1280 * 720 (index 2)
// UI button: start automatic screen location.
public void BtnScreenLocate()
{
    if (DebugScreenImage)
    {
        // Editor debug path: feed a static image through the webcam pipeline.
        screenIdentification = new o0.Project.ScreenIdentification();
        WebCamIsReady(DebugScreenImage);
        CreateUVCTexture2DIfNeeded();
    }
    //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
    //screenIdentification.LocateScreen(Capture, Delay);
    OnLocateScreenEnter();
}
// bool log1 = false, log2 = false;
// Begins automatic screen recognition: raises the capture flags (consumed in
// UVCUpdate), clears the previous quad, and switches the camera to its highest
// (calibration) resolution.
public void OnLocateScreenEnter()
{
    bAutomaticRecognition = true;
    bAutomaticRecognitionStart = true;
    screenIdentification.Screen.QuadInCamera = null;
    //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // record the resolution before entering (game-scene resolution, lower than during recognition)
    //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
    // Debug.Log("[ScreenLocate] 开始捕获 DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
    // InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
    EnterResolution = mUVCCameraInfo.Size; // record the resolution before entering (game-scene resolution, lower than during recognition)
    Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // highest resolution
    Resize((int)_HighResolution.x, (int)_HighResolution.y);
    //CreateUVCTexture2DIfNeeded();
    // log1 = true;
    // log2 = true;
}
// Called when screen location finishes: flags the end of capture (consumed in
// UVCUpdate) and restores the resolution recorded by OnLocateScreenEnter.
public void OnLocateScreenEnd()
{
    bAutomaticRecognitionEnd = true;
    // Recognition runs at a high resolution; switch back to the lower one when it finishes.
    //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
    Resize((int)EnterResolution.x, (int)EnterResolution.y);
}
/**
 * Changes the camera's actual resolution (Android only; PC path is pending).
 */
public void Resize(int width, int height)
{
    if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID
    // Send the resolution-change command to the camera.
    mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
    // PC TODO: figure out how to handle this.
    // ResizePC(width, height);
#endif
    //mUVCCameraInfo.SetSize(width, height); // manually record the resolution; may be problematic — width/height are the requested resolution, not the camera's actual one
    Debug.Log($"[ScreenLocate] 开始修改分辨率 mUVCCameraInfo origin:[{ mUVCCameraInfo.CurrentWidth },{ mUVCCameraInfo.CurrentHeight }]=>target:[{ width },{ height }]");
    // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
}
/// <summary>
/// Changes the recorded resolution on PC. The webcam-restart path is disabled;
/// currently this only updates the bookkeeping on mUVCCameraInfo.
/// </summary>
/// <param name="width">Requested width in pixels.</param>
/// <param name="height">Requested height in pixels.</param>
public void ResizePC(int width, int height)
{
    if (mUVCCameraInfo == null) return;
    //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
    // if(pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
    //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
    mUVCCameraInfo.SetSize(width, height); // manually record the resolution; may be problematic — width/height are the requested resolution, not the camera's actual one
    Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
}
// Coroutine: stops the current PC webcam, waits for resources to release, then
// recreates the WebCamTexture at the new size and restarts it via StartWebCam.
private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
{
    WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
    // Stop the current WebCamTexture
    _webCamTexture.Stop();
    // Trigger OnWebCamStopped event
    // OnWebCamStopped?.Invoke();
    // Wait for a short time to ensure resources are released
    yield return new WaitForSeconds(0.5f);
    // Create a new WebCamTexture with the new dimensions
    _webCamTexture = new WebCamTexture(newWidth, newHeight);
    pcWebCamera.webCamTexture = _webCamTexture;
    mUVCTexture = _webCamTexture;
    // Restart the camera
    yield return StartCoroutine(StartWebCam(pcWebCamera));
}
// Coroutine: plays the PC webcam texture, waits until it is actually running,
// then records its real resolution on mUVCCameraInfo.
private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
{
    WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
    _webCamTexture.Play();
    // Wait until the WebCamTexture is playing
    while (!_webCamTexture.isPlaying)
    {
        yield return null;
    }
    // Trigger OnWebCamStarted event
    //OnWebCamStarted?.Invoke();
    mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // manually record the resolution; may be problematic — the requested size can differ from the camera's actual one
    Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
    // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
}
#endregion
  720. public void BtnScreenMap()
  721. {
  722. ToMode(Mode.ScreenMap);
  723. }
  724. //进入手动定位屏幕
  725. public void BtnScreenLocateManual()
  726. {
  727. ToMode(Mode.ScreenLocateManual);
  728. }
/// <summary>
/// Detected screen corners in fixed vertex order: bottom-left, bottom-right,
/// top-left, top-right.
/// </summary>
public static List<Vector2> quadUnityVectorList = new();
  733. /// <summary>
  734. /// 打印信息
  735. /// </summary>
  736. /// <param name="list">左下,右下,左上,右上</param>
  737. /// <returns></returns>
  738. public string PrintVector2List(List<Vector2> list)
  739. {
  740. if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
  741. string result = "";
  742. if (list.Count == 4)
  743. {
  744. result = "左下" + list[0].ToString() + ",右下" + list[1].ToString() + ",左上" + list[2].ToString() + ",右上" + list[3].ToString();
  745. }
  746. else
  747. {
  748. result = "count != 4 error";
  749. }
  750. //foreach (Vector2 vector in list)
  751. //{
  752. // result += vector.ToString() + " ";
  753. //}
  754. //Debug.Log(result);
  755. return result;
  756. }
  757. /// <summary>
  758. /// 判断是否存在NaN
  759. /// </summary>
  760. /// <param name="vectors"></param>
  761. /// <returns></returns>
  762. public bool ContainsNaN(List<Vector2> vectors)
  763. {
  764. foreach (var v in vectors)
  765. {
  766. if (float.IsNaN(v.x) || float.IsNaN(v.y))
  767. {
  768. return true;
  769. }
  770. }
  771. return false;
  772. }
  773. // 标记屏幕的四个角, ScreenQuadObject 下挂了4个子节点用于标记
  774. public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
  775. {
  776. if (screen == null)
  777. {
  778. Info.text = "识别屏幕失败";
  779. return;
  780. }
  781. Info.text = "已识别到屏幕";
  782. if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
  783. {
  784. ScreenQuadObject.gameObject.SetActive(true);
  785. for (int i = 0; i < 4; i++)
  786. {
  787. if (DebugOnEditorWin)
  788. {
  789. RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
  790. t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
  791. }
  792. }
  793. }
  794. quadUnityVectorList = screen.GetUnityVertexList(); // 记录四个点
  795. if (!ContainsNaN(quadUnityVectorList))
  796. {
  797. SaveScreenLocateVectorList();
  798. //SyncInfraredDemo();
  799. //SyncInfraredScreenPositioningView();
  800. InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
  801. Debug.Log("[ScreenLocate] ShowScreen 已识别到屏幕,更新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
  802. }
  803. else
  804. {
  805. Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
  806. }
  807. }
/// <summary>Convenience overload that targets the default <c>ScreenQuad</c> rect.</summary>
public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);
  809. /// <summary>
  810. /// 校准点位置存储到本地
  811. /// </summary>
  812. static public void SaveScreenLocateVectorList()
  813. {
  814. string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
  815. Debug.Log("SaveScreenLocateVectorList: " + saveStr);
  816. PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
  817. }
  818. /// <summary>
  819. /// 获取本地存储校准点位置
  820. /// </summary>
  821. static public bool GetScreenLocateVectorList()
  822. {
  823. string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
  824. Debug.Log("GetScreenLocateVectorList:"+ posListStr);
  825. if (!string.IsNullOrWhiteSpace(posListStr))
  826. {
  827. quadUnityVectorList.Clear();
  828. quadUnityVectorList = posListStr.Split(';')
  829. .Select(s =>
  830. {
  831. string[] parts = s.Split(',');
  832. return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
  833. })
  834. .ToList();
  835. return true;
  836. }
  837. else return false;
  838. }
  839. public Vector2 AdjustPointsOffset(Vector2 inputPoint,string type = "CameraLocation")
  840. {
  841. // 计算从原始中心到输入点的偏移量
  842. if (type == "CameraLocation")
  843. {
  844. CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
  845. return CameraLocationOffset;
  846. }
  847. else {
  848. //ScreenUV
  849. UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
  850. return UVOffset;
  851. }
  852. }
  853. /// <summary>
  854. /// 这里计算一个偏移后的cameraLocatoin位置
  855. /// </summary>
  856. /// <param name="cameraLocatoin"></param>
  857. /// <returns></returns>
  858. public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin) {
  859. return cameraLocatoin - CameraLocationOffset;
  860. }
/// <summary>
/// Switches the UI between the three operating modes (ScreenMap, InfraredLocate,
/// ScreenLocateManual): updates the info label, screen overlay, crosshairs and the
/// Info label's sibling order. No-op when already in the requested mode, and
/// ScreenMap is refused until a screen has been located.
/// </summary>
/// <param name="mode">Target mode.</param>
void ToMode(Mode mode)
{
    if (this.mode == mode)
        return;
    if (mode == Mode.ScreenMap)
    {
        // Screen mapping requires a previously located screen.
        if (!screenIdentification.Screen.Active)
        {
            Info.text = "先定位屏幕";
            return;
        }
        Info.text = "按ESC退出";
        SetScreen(Color.black);
        Info.transform.SetAsLastSibling();
        this.mode = Mode.ScreenMap;
    }
    else if (mode == Mode.InfraredLocate)
    {
        Info.text = screenIdentification.Screen.Active ? "已定位屏幕" : "定位屏幕失败";
        //Info.text = "已识别到屏幕";
        SetScreen(null);
        // Hide all crosshair markers and the full-screen overlay.
        foreach (var i in CrosshairInScreen)
            i.gameObject.SetActive(false);
        FullScreenImage.gameObject.SetActive(false);
        Info.transform.SetSiblingIndex(transform.childCount - 4);
        this.mode = Mode.InfraredLocate;
        // Clear debug texture slot 6.
        DebugTexture(6, null);
        //DebugTexture(1, null); //null
        // rawImage1.texture = null;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
        Console.WriteLine($"{TAG} Mode.InfraredLocate:已识别到屏幕:{screenIdentification.Screen.Active}");
#endif
    }
    else if (mode == Mode.ScreenLocateManual)
    {
        Info.text = "左键单击屏幕 左下角";
        FullScreenImage.gameObject.SetActive(true);
        Info.transform.SetSiblingIndex(transform.childCount - 1);
        // var newTex = WebCamera.webCamTexture.AutoLight(10);
        //DebugTexture(1, TextureToTexture2D(rawImage.texture));
        // Refresh the readable snapshot of the UVC texture, then show a
        // brightness-boosted copy for manual corner picking.
        CreateUVCTexture2DIfNeeded();
        DebugTexture(6, mUVCTexture2D.zimAutoLight(brightness));
        //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
        //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
        this.mode = Mode.ScreenLocateManual;
    }
}
  908. private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
  909. {
  910. if (width == 0)
  911. width = texture.width;
  912. if (height == 0)
  913. height = texture.height;
  914. Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
  915. RenderTexture currentRT = RenderTexture.active;
  916. RenderTexture renderTexture = RenderTexture.GetTemporary(
  917. width,
  918. height,
  919. 0,
  920. RenderTextureFormat.ARGB32,
  921. RenderTextureReadWrite.Linear);
  922. Graphics.Blit(texture, renderTexture);
  923. RenderTexture.active = renderTexture;
  924. _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
  925. _texture2D.Apply();
  926. RenderTexture.active = currentRT;
  927. RenderTexture.ReleaseTemporary(renderTexture);
  928. return _texture2D;
  929. }
  930. //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
  931. //{
  932. // if (mUVCTexture2D != null)
  933. // Destroy(mUVCTexture2D);
  934. // mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
  935. //}
/// <summary>
/// Rebuilds <c>mUVCTexture2D</c> from <c>mUVCTexture</c> using the source texture's
/// own width/height, destroying any previous snapshot first to avoid leaking textures.
/// </summary>
private void CreateUVCTexture2DIfNeeded()
{
    if (mUVCTexture2D != null)
        Destroy(mUVCTexture2D);
    mUVCTexture2D = TextureToTexture2D(mUVCTexture);
}
/// <summary>
/// Rebuilds <c>mUVCTexture2D</c> from <c>mUVCTexture</c> at the given size,
/// destroying any previous snapshot first to avoid leaking textures.
/// </summary>
/// <param name="width">Target width; 0 uses the source texture width.</param>
/// <param name="height">Target height; 0 uses the source texture height.</param>
private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
{
    if (mUVCTexture2D != null)
        Destroy(mUVCTexture2D);
    mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
}
  956. #region DoubleButton
private DateTime m_firstTime;  // timestamp of the first click; default(DateTime) = none recorded
private DateTime m_secondTime; // timestamp of the second click; default(DateTime) = none recorded
// Double-click confirmed: enter manual screen locating and clear the click timestamps.
private void Press()
{
    Debug.Log("进入手动定位");
    BtnScreenLocateManual();
    resetTime();
}
  965. public void OnDoubleClick()
  966. {
  967. //超时重置
  968. if (!m_firstTime.Equals(default(DateTime)))
  969. {
  970. var intervalTime = DateTime.Now - m_firstTime;
  971. float milliSeconds = intervalTime.Seconds * 1000 + intervalTime.Milliseconds;
  972. if (milliSeconds >= 400)
  973. resetTime();
  974. }
  975. // 按下按钮时对两次的时间进行记录
  976. if (m_firstTime.Equals(default(DateTime)))
  977. m_firstTime = DateTime.Now;
  978. else
  979. m_secondTime = DateTime.Now;
  980. // 在第二次点击触发,时差小于400ms触发
  981. if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
  982. {
  983. var intervalTime = m_secondTime - m_firstTime;
  984. float milliSeconds = intervalTime.Seconds * 1000 + intervalTime.Milliseconds;
  985. if (milliSeconds < 400)
  986. Press();
  987. else
  988. resetTime();
  989. }
  990. }
  991. private void resetTime()
  992. {
  993. m_firstTime = default(DateTime);
  994. m_secondTime = default(DateTime);
  995. }
  996. #endregion
  997. #region 性能检测相关
  998. void InvalidateTimings()
  999. {
  1000. m_ValidHistoryFrames = 0;
  1001. m_AverageTime = float.NaN;
  1002. m_MedianTime = float.NaN;
  1003. m_MinTime = float.NaN;
  1004. m_MaxTime = float.NaN;
  1005. }
  1006. void UpdateInputs()
  1007. {
  1008. //重置
  1009. if (Input.GetKeyDown(KeyCode.UpArrow))
  1010. {
  1011. InvalidateTimings();
  1012. }
  1013. }
  1014. #endregion
  1015. }