ScreenLocate.cs 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179
  1. #define ENABLE_LOG
  2. using InfraredManager;
  3. using o0;
  4. using SLAMUVC;
  5. using System;
  6. using System.Collections;
  7. using System.Collections.Generic;
  8. using System.Linq;
  9. using UnityEngine;
  10. using UnityEngine.Experimental.AI;
  11. using UnityEngine.UI;
  12. using ZIM;
  13. using ZIM.Unity;
  14. using static SLAMUVC.UVCManager;
  15. using Color = UnityEngine.Color;
  16. using Time = UnityEngine.Time;
  17. [RequireComponent(typeof(Canvas))]
  18. public partial class ScreenLocate : MonoBehaviour
  19. {
// Event hub that fans camera/position updates out to interested listeners.
public InfraredCameraHelper InfraredCameraHelper;
private const string TAG = "ScreenLocate#"; // log tag prefix
/// <summary>Number of infrared spots tracked simultaneously.</summary>
public enum InfraredCount : int
{
    Single = 1, // one-spot tracking
    Double = 2  // two-spot tracking
}
/// <summary>Current UI/processing mode of the locator.</summary>
enum Mode
{
    InfraredLocate,     // normal infrared spot tracking
    ScreenMap,          // map spots into located-screen UV space (debug view)
    ScreenLocateManual  // manual screen-corner calibration
}
/// <summary>Source platform of the camera feed.</summary>
enum Platform
{
    Window,  // PC path (WebCamTexture)
    Android  // Android path (UVC camera)
}
Platform mPlatform = Platform.Android; // which camera path fed the current texture
// Two spots, ordered by infrared blob size (large to small). Coordinates are
// read via InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
public InfraredSpot[] InfraredSpots
{
    get
    {
        // NOTE(review): reading this property has a side effect — it switches
        // detection to double-spot mode on the next Update().
        infraredCount = InfraredCount.Double;
        return infraredSpotBuffer;
    }
}
// Single spot; coordinates via InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
public InfraredSpot InfraredSpotSingle
{
    get
    {
        // NOTE(review): side effect — switches detection to single-spot mode.
        infraredCount = InfraredCount.Single;
        return infraredSpotBuffer[0];
    }
}
// Raw detection output for the current frame, written by Update().
public InfraredSpot[] infraredSpotBuffer;
// Returns the current spot-count mode as a string (for UI/debugging).
public string GetInfraredCount() { return infraredCount.ToString(); }
  59. /// <summary>
  60. /// CameraLocation 的偏移量
  61. /// </summary>
  62. public Vector2 CameraLocationOffset { get; set; } = new Vector2(0, 0);
  63. public Vector2 UVOffset { get; set; } = new Vector2(0, 0);
  64. // public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();
  65. #region UVC 处理的对象
  66. //public UVCManager mUVCManager;
  67. public CameraInfo mUVCCameraInfo;
  68. public bool getUVCCameraInfo => mUVCCameraInfo != null ? true : false;
  69. public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
  70. private Texture mUVCTexture;
  71. public Texture getUVCTexture => mUVCTexture;
  72. public Texture setUVCTexture {
  73. set {
  74. mUVCTexture = value;
  75. }
  76. }
  77. private Texture2D mUVCTexture2D;
  78. // [SerializeField] Texture2DArray mUVCOutArray;
  79. #endregion
  80. public Text Info;
  81. public List<RectTransform> CrosshairInCamera;
  82. public List<RectTransform> CrosshairInScreen;
  83. public RectTransform ScreenQuad;
  84. public Toggle SaveToggle;
  85. public Toggle FullScreenToggle;
  86. public LineGenerator UILineGenerator;
  87. public bool ShowScreenQuad = false;
  88. // output的图像
  89. // 图0是摄像机原图,图1是屏幕识别的全部可选线段,图2是识别出的屏幕画面,图3是识别出的屏幕四条边,图4是图2和图3的叠加,图5显示3种不同颜色的算法识别线段
  90. public List<RawImage> outputRawImages;
  91. [NonSerialized] public Texture[] outputTexture2D = new Texture[8];
  92. public RawImage FullScreenImage;
  93. public PixelCheaker ScreenPixelCheaker;
  94. public InfraredSpotSettings InfraredSpotSettings;
  95. public o0.Geometry2D.Vector<int> CameraSize { get; set; }
  96. public Texture2D DebugScreenImage;
  97. public bool DebugOnZIMDemo = false;
  98. // private SynchronizationContext mainContext;
  99. //是否单点显示
  100. public bool bSinglePoint = true;//默认单点识别
  101. [NonSerialized] public float ReDoLocateCalibrationRatio = 0.04f; // 重复定位时校准的距离比例,例如先手动定位,再自动定位,会以手动的结果来校准
  102. [NonSerialized] public InfraredCount infraredCount = InfraredCount.Single;
  103. bool bIdentifyRed = true;//默认设备红色
  104. bool bIdentifyGreen = true;
#region performance measurement
public Text m_UITime;                          // UI label for frame-timing statistics
const float m_UIUpdateInterval = 0.1f;         // seconds between stats-label refreshes
float m_UIUpdateTimer = 0.0f;
List<float> m_History = new List<float>(100);  // ring buffer of recent frame times
int m_ValidHistoryFrames = 0;
float m_AverageTime = float.NaN;
float m_MedianTime = float.NaN;
float m_MinTime = float.NaN;
float m_MaxTime = float.NaN;
public float updateInterval = 0.5F;            // FPS counter refresh interval (seconds)
private double lastInterval;                   // timestamp of the last FPS sample
private int frames = 0;
private float fps;
public Text m_FPS;                             // UI label for the FPS counter
#endregion
#region PC-only camera parameters
// Brightness adjustment for the PC webcam image.
public float pcBrightness { get; set; } = 0.0f;
// Contrast adjustment for the PC webcam image.
public float pcContrast { get; set; } = 0.0f;
#endregion
InfraredLocate infraredLocate; // infrared spot detector (created lazily in Update)
RectTransform canvas;          // root canvas RectTransform (cached in Start)
Mode mode;                     // current UI/processing mode
// Manually placed calibration points: position plus the marker object drawn for it.
List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
//o0.Project.WebCam o0WebCam = null;
o0.Project.ScreenIdentification screenIdentification; // screen locator (created lazily in Update)
public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;
/// <summary>
/// True while automatic recognition is capturing frames.
/// </summary>
bool bAutomaticRecognition { get; set; } = false;      // capture in progress
bool bAutomaticRecognitionStart { get; set; } = false; // request to start capturing
bool bAutomaticRecognitionEnd { get; set; } = false;   // request to finish capturing
[NonSerialized] public RectTransform BackQuad = null;  // full-screen background quad (resolved lazily)
static public ScreenLocate Main;                       // singleton instance, assigned in Awake
  142. static public void AutoLightPixels(Color[] pixels, int width, int height)
  143. {
  144. if (Main.DebugOnZIMDemo) return;
  145. var newTex = pixels.zimAutoLightSimple(width, height);
  146. DebugTexture(7, newTex);
  147. try
  148. {
  149. Main.FullScreenImage.texture = newTex;
  150. }
  151. catch { }
  152. }
  153. static public void DebugTexture(int index, Texture texture)
  154. {
  155. if (Main.DebugOnZIMDemo) return;
  156. LateDestory(Main.outputTexture2D[index]);
  157. if (Main.outputTexture2D != null)
  158. Main.outputTexture2D[index] = texture;
  159. try
  160. {
  161. Main.outputRawImages[index].texture = texture;
  162. }
  163. catch { }
  164. }
// Destroys a Unity object at end of frame. (NOTE: "Destory" is a long-standing typo kept because call sites depend on the name.)
static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));
  166. static public void SetScreen(UnityEngine.Color? color = null)
  167. {
  168. if (Main.BackQuad == null)
  169. {
  170. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  171. var background = canvas.Find("Background");
  172. Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  173. }
  174. Main.BackQuad.parent.gameObject.SetActive(color != null);
  175. Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  176. //Debug.Log("Set Screen " + color.GetColorName());
  177. }
  178. static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
  179. {
  180. if (Main.BackQuad == null)
  181. {
  182. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  183. var background = canvas.Find("Background");
  184. Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  185. }
  186. Main.BackQuad.parent.gameObject.SetActive(color != null);
  187. Main.BackQuad.anchorMin = rect.min;
  188. Main.BackQuad.anchorMax = rect.max;
  189. Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  190. //Debug.Log("Set Screen " + color.GetColorName());
  191. }
  192. static void DebugBackQuad(Rect? rect = null)
  193. {
  194. if (Main.BackQuad)
  195. {
  196. Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
  197. Main.BackQuad.GetComponent<RawImage>().color = Color.white;
  198. Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
  199. if (rect.HasValue)
  200. {
  201. Main.BackQuad.anchorMin = rect.Value.min;
  202. Main.BackQuad.anchorMax = rect.Value.max;
  203. }
  204. }
  205. }
// Intended to reallocate mUVCTexture when the requested resolution exceeds the
// current one. NOTE(review): the early `return` below disables everything after
// the log call, so this method currently only logs the texture size — it looks
// deliberately disabled; confirm intent before re-enabling.
public void ReSizeTexture(int width, int height)
{
    Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
    return;
    // ---- unreachable while the return above is in place ----
    if (mUVCTexture.width < width || mUVCTexture.height < height) // reallocate only if the current texture is too small
    {
        Texture2D tex = new Texture2D(
            width, height,
            TextureFormat.ARGB32,
            false, /* mipmap */
            true /* linear */);
        tex.filterMode = FilterMode.Point;
        tex.Apply();
        mUVCTexture = tex;
        mUVCCameraInfo.previewTexture = tex;
        var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr(); // NOTE(review): result is unused
    }
}
void Awake()
{
    // Publish the singleton as early as possible; static helpers rely on it.
    Main = this;
#if !UNITY_EDITOR_WIN
    // The ZIM demo debug path is only meaningful in the Windows editor.
    DebugOnZIMDemo = false;
#endif
    //if (mUVCDrawer)
    // mUVCDrawer.StartPreviewAction += UVCIsReady;
}
// Teardown counterpart of Awake; the drawer unsubscription is currently disabled.
void OnDestroy()
{
    //if (mUVCDrawer)
    // mUVCDrawer.StartPreviewAction -= UVCIsReady;
}
  238. void Start()
  239. {
  240. //mainContext = SynchronizationContext.Current;
  241. // 设置目标帧率为60
  242. Application.targetFrameRate = 60;
  243. canvas = transform.GetComponent<RectTransform>();
  244. mode = Mode.InfraredLocate;
  245. if (DebugScreenImage && DebugOnZIMDemo)
  246. {
  247. screenIdentification = new o0.Project.ScreenIdentification();
  248. screenIdentification.LocateScreen();
  249. FullScreenToggle.onValueChanged.AddListener((i) =>
  250. {
  251. Screen.fullScreen = i;
  252. });
  253. }
  254. infraredCount = InfraredCount.Single;
  255. #region 性能检测相关
  256. for (var i = 0; i < m_History.Capacity; ++i)
  257. {
  258. m_History.Add(0.0f);
  259. }
  260. lastInterval = Time.realtimeSinceStartup;
  261. frames = 0;
  262. #endregion
  263. }
  264. IEnumerator LateDestoryIEnum(UnityEngine.Object o)
  265. {
  266. if (o)
  267. {
  268. yield return new WaitForEndOfFrame();
  269. Destroy(o);
  270. }
  271. }
  272. //ZIMWebCamera场景使用
  273. public void WebCamIsReady(Texture texture)
  274. {
  275. mPlatform = Platform.Window;
  276. mUVCTexture = texture;
  277. mUVCCameraInfo = new CameraInfo(mUVCTexture);
  278. brightness = 0;
  279. //UVC准备好
  280. InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
  281. }
  282. /// <summary>
  283. /// UVCManager 创建初始化时候,更新此函数
  284. /// </summary>
  285. /// <param name="cameraInfo"></param>
  286. public void UVCIsReady(CameraInfo cameraInfo)
  287. {
  288. mPlatform = Platform.Android;
  289. mUVCTexture = cameraInfo.previewTexture;
  290. mUVCCameraInfo = cameraInfo;
  291. Debug.Log("UVCIsReady:" + mUVCCameraInfo);
  292. //UVC准备好
  293. InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
  294. }
  295. /// <summary>
  296. /// 获取新的 previewTexture
  297. /// </summary>
  298. public void UVCUpdate(bool bChange)
  299. {
  300. mUVCTexture = mUVCCameraInfo.previewTexture;
  301. Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:"+bChange);
  302. InfraredCameraHelper?.InvokeOnUVCIsUpdate();
  303. //这里判断是否进入自动识别?
  304. if (bAutomaticRecognitionStart) {
  305. bAutomaticRecognitionStart = false;
  306. Debug.Log("[ScreenLocate] UVCUpdate 开始自动识别 Capture:" + Capture + " ,Delay: " + Delay);
  307. screenIdentification.LocateScreen(Capture, Delay);
  308. }
  309. if (bAutomaticRecognitionEnd) {
  310. bAutomaticRecognitionEnd = false;
  311. Debug.Log("[ScreenLocate] UVCUpdate 结束捕获,当前摄像机分辨率为: " + mUVCCameraInfo.Size);
  312. bAutomaticRecognition = false;
  313. }
  314. }
// Last brightness value pushed to the camera (reset when a PC webcam attaches).
int brightness = 0;
/// <summary>
/// Sets the infrared-spot brightness threshold used by the detection algorithm.
/// Lower thresholds detect dimmer spots; the algorithm default is 0.93.
/// Ignored until the detector exists and for values outside [0, 1].
/// </summary>
/// <param name="value">Threshold in [0, 1].</param>
public void SetInfraredLocateBrightnessThreshold(float value)
{
    if (infraredLocate != null)
    {
        if (value >= 0 && value <= 1)
            infraredLocate.SetBrightnessThreshold(value);
    }
}
/// <summary>
/// Per-frame pipeline: lazily builds the screen-identification and infrared
/// detection objects, keeps the camera resolution in sync (propagating quad
/// changes to listeners), then runs spot detection and dispatches the result
/// according to the current Mode.
/// </summary>
void Update()
{
    // ---- disabled FPS counter ----
    //++frames;
    //float timeNow = Time.realtimeSinceStartup;
    //if (timeNow > lastInterval + updateInterval)
    //{
    //    fps = (float)(frames / (timeNow - lastInterval));
    //    frames = 0;
    //    lastInterval = timeNow;
    //}
    //if (m_FPS != null)
    //    m_FPS.text = "FPS:" + fps.ToString("f2");
    if (mUVCCameraInfo == null) return; // no camera attached yet
    if (screenIdentification == null)
    {
        // Lazy construction on the first frame with a camera.
        screenIdentification = new o0.Project.ScreenIdentification();
        // PC no longer switches resolution, so the end-callback is Android-only.
#if UNITY_ANDROID
        //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
        screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
#endif
    }
    if (infraredLocate == null)
    {
        infraredLocate = new InfraredLocate(mUVCCameraInfo, screenIdentification, InfraredSpotSettings, ScreenPixelCheaker);
        // InfraredDemo initialization (disabled):
        //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
        //Debug.Log("Init Red filterValue:" + redfilterValue);
        //infraredLocate.SetBrightnessThreshold(redfilterValue); // brightness threshold; lower detects dimmer spots, default 0.93
    }
    // Sync camera resolution; a change must also be propagated to listeners.
    if (screenIdentification.Screen.RefreshCameraSize(getUVCCameraInfoSize))
    {
        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            //SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变:[" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变,刷新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
        }
        else {
            // NaN vertices mean the quad is unusable — recalibration is required.
            Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
        }
        if (DebugOnZIMDemo)
            Main.ShowScreen(Main.ScreenQuad, screenIdentification.Screen.QuadInCamera);
    }
    //var t0 = Time.realtimeSinceStartup;
    /* New*/
    //Debug.Log((mUVCCameraInfo != null) +" = "+ mUVCCameraInfo.IsPreviewing + " = "+ screenIdentification.Screen.Active);
    if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing) // infrared detection only runs while the camera is previewing
    {
        // ---- disabled resolution logging ----
        //if (bAutomaticRecognition)
        //{
        //    // resolution used during recognition
        //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        //    if (log1)
        //    {
        //        log1 = false;
        //        Debug.Log("[ScreenLocate] log1:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
        //    }
        //}
        //else
        //{
        //    // after recognition, use the camera resolution getUVCCameraInfoSize
        //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        //    if (log2)
        //    {
        //        log2 = false;
        //        Debug.Log("[ScreenLocate] log2:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
        //    }
        //}
        // Skip detection entirely when a Bluetooth device is connected and it is
        // not running in 9-axis mode.
        if (BluetoothAim.ins?.status == BluetoothStatusEnum.ConnectSuccess && AimHandler.ins && AimHandler.ins.bRuning9Axis()) return;
        // Render at the camera's reported resolution.
        CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        // screenIdentification.Update returns true while it is still consuming
        // frames for screen location; spot detection runs only when it is idle.
        if (!screenIdentification.Update(mUVCTexture2D))
        {
            CameraSize = new o0.Geometry2D.Vector<int>(mUVCTexture2D.width, mUVCTexture2D.height);
            var pixels = mUVCTexture2D.GetPixels(); // left-to-right, bottom-to-top
            AutoLightPixels(pixels, CameraSize.x, CameraSize.y);
            //return;
            //InfraredSpots = infraredLocate.Update(pixels);
            if (bSinglePoint)
                infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
            else
                infraredSpotBuffer = infraredLocate.Update(pixels);
            if (mode == Mode.ScreenLocateManual)
            {
                // Manual calibration: draw each detected spot over the full-screen preview.
                for (int i = 0; i < infraredSpotBuffer.Length; i++)
                {
                    if (infraredSpotBuffer[i].CameraLocation != null)
                    {
                        // Spot detected.
                        var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
                        CrosshairInCamera[i].gameObject.SetActive(true);
                        CrosshairInCamera[i].anchoredPosition = posInCanvas;
                    }
                    else
                        CrosshairInCamera[i].gameObject.SetActive(false);
                }
            }
            else if(mode == Mode.InfraredLocate)
            {
                if (mPlatform == Platform.Window) // draw the UI markers; hidden once in-game
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Spot detected.
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                // (previously the mobile path also required: mPlatform == Platform.Android &&)
                // Shared by mobile and PC: forward filtered spot positions.
                if (infraredSpotBuffer.Length > 0)
                {
                    int redIndex = 0;
                    int greenIndex = 1;
                    // Single-spot mode: only the first (largest) spot is reported
                    // (if the largest spot leaves the frame, the marker flickers).
                    if (bSinglePoint)
                    {
                        redIndex = 0; // in single-spot mode the tracked color can be switched
                        if (infraredSpotBuffer[redIndex].ScreenUV != null)
                        {
                            string str = "Single:";
                            Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                            onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                        }
                    }
                    else
                    {
                        // Double-spot mode: choose which spot(s) to report.
                        if (bIdentifyRed && !bIdentifyGreen)
                        {
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                            }
                            else
                            {
                                Info.text = "未检测到红色最大点!";
                            }
                        }
                        else if (!bIdentifyRed && bIdentifyGreen)
                        {
                            if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                            {
                                Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                            }
                            else
                            {
                                Info.text = "未检测到绿色点!";
                            }
                        }
                        else
                        {
                            // Neither or both colors selected: detect both and
                            // report whichever spot is available, red first.
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                            }
                            else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                            {
                                Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                            }
                            else
                            {
                                Info.text = "未检测到点!";
                            }
                        }
                    }
                }
            }
            else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
            {
                // Screen-map debug view: draw spots in screen UV space on the canvas.
                for (int i = 0; i < infraredSpotBuffer.Length; i++)
                {
                    if (infraredSpotBuffer[i].ScreenUV != null)
                    {
                        // Spot detected.
                        var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                        CrosshairInScreen[i].gameObject.SetActive(true);
                        CrosshairInScreen[i].anchoredPosition = posInCanvas;
                    }
                    else
                        CrosshairInScreen[i].gameObject.SetActive(false);
                }
                if (Input.GetKeyDown(KeyCode.Escape))
                    ToMode(Mode.InfraredLocate);
            }
        }
    }
    // ---- disabled per-frame timing statistics ----
    //var t1 = Time.realtimeSinceStartup;
    //var dt = t1 - t0;
    //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
    //++m_ValidHistoryFrames;
    //m_UIUpdateTimer += Time.deltaTime;
    //if (m_UIUpdateTimer >= m_UIUpdateInterval)
    //{
    //    m_UIUpdateTimer = 0.0f;
    //    if (m_ValidHistoryFrames >= m_History.Count)
    //    {
    //        m_ValidHistoryFrames = 0;
    //        m_AverageTime = 0.0f;
    //        m_MinTime = float.PositiveInfinity;
    //        m_MaxTime = float.NegativeInfinity;
    //        {
    //            for (var i = 0; i < m_History.Count; i++)
    //            {
    //                var time = m_History[i];
    //                m_AverageTime += time;
    //                m_MinTime = Mathf.Min(m_MinTime, time);
    //                m_MaxTime = Mathf.Max(m_MaxTime, time);
    //            }
    //            m_AverageTime /= m_History.Count;
    //        }
    //        {
    //            m_History.Sort();
    //            // Odd-length history?
    //            if ((m_History.Count & 1) != 0)
    //            {
    //                m_MedianTime = m_History[m_History.Count / 2];
    //            }
    //            else
    //            {
    //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
    //            }
    //        }
    //    }
    //    var statistics = $"{m_History.Count} 帧样本:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
    //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
    //    if (m_UITime != null)
    //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D? ",T2D:" : "")}{(mUVCTexture2D? mUVCTexture2D.width+ "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height:"")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
    //}
    //UpdateInputs();
}
Vector2 targetPos = Vector2.zero; // last position forwarded by onFilterPos
Vector2 movePos = Vector2.zero;   // smoothed position (smoothing path currently disabled)
int moveSpeed = 20;               // lerp speed for the disabled smoothing path
public float filterDis = 3.0f;    // minimum pixel movement before a new position is forwarded
  586. void onFilterPos(Vector2 _vector2Pos)
  587. {
  588. //主要用于模拟九轴时候的
  589. //添加一个偏移量,使得最后输出的准心是指向正中心
  590. Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
  591. if (Vector2.Distance(np, targetPos) >= filterDis)
  592. {
  593. targetPos = np;
  594. //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(targetPos.x, targetPos.y, 0));
  595. //Vector2 np = new Vector2(uvCenterOffset.x * Screen.width, uvCenterOffset.y * Screen.height);
  596. //point -= np;
  597. InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos);
  598. }
  599. //movePos = Vector3.Lerp(movePos, targetPos, Time.deltaTime * moveSpeed);
  600. //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(movePos.x, movePos.y, 0));
  601. }
  602. Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
  603. void onFilterPos2(Vector2 _vector2Pos, int index)
  604. {
  605. Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height);
  606. if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
  607. {
  608. _targetPoints2[index] = np;
  609. InfraredCameraHelper.InvokeOnPositionUpdate2(_targetPoints2[index], index);
  610. }
  611. }
#region automatic recognition
int Capture = 30;        // frame count passed to LocateScreen
int Delay = 30;          // per-step delay passed to LocateScreen
Vector2 EnterResolution; // camera resolution before recognition started (restored afterwards)
// int DefaultResolutionIndex;
// readonly public int HighScreenLocateResolutionIndex = 2; // during auto recognition the camera is fixed to 1280x720 (index 2)
/// <summary>
/// UI button: starts automatic screen location. When DebugScreenImage is set,
/// first wires up a fake camera backed by that static image.
/// </summary>
public void BtnScreenLocate()
{
    if (DebugScreenImage)
    {
        screenIdentification = new o0.Project.ScreenIdentification();
        CameraSize = new o0.Geometry2D.Vector<int>(DebugScreenImage.width, DebugScreenImage.height);
        WebCamIsReady(DebugScreenImage);
        CreateUVCTexture2DIfNeeded();
    }
    //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
    //screenIdentification.LocateScreen(Capture, Delay);
    OnLocateScreenEnter();
}
  631. // bool log1 = false, log2 = false;
  632. public void OnLocateScreenEnter()
  633. {
  634. bAutomaticRecognition = true;
  635. bAutomaticRecognitionStart = true;
  636. ResetScreenIdentification();
  637. //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // 记录一下进入前的分辨率(游戏场景的分辨率,比识别时更低)
  638. //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
  639. // Debug.Log("[ScreenLocate] 开始捕获 DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
  640. // InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
  641. EnterResolution = mUVCCameraInfo.Size;// 记录一下进入前的分辨率(游戏场景的分辨率,比识别时更低)
  642. Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; //最高的分辨率
  643. Resize((int)_HighResolution.x, (int)_HighResolution.y);
  644. if (DebugOnZIMDemo)
  645. screenIdentification.LocateScreen();
  646. //CreateUVCTexture2DIfNeeded();
  647. // log1 = true;
  648. // log2 = true;
  649. }
/// <summary>
/// Recognition finished: flags the state machine and restores the camera
/// resolution recorded in OnLocateScreenEnter (recognition runs at a higher
/// resolution, which is dropped again afterwards).
/// </summary>
public void OnLocateScreenEnd()
{
    bAutomaticRecognitionEnd = true;
    //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
    Resize((int)EnterResolution.x, (int)EnterResolution.y);
}
/// <summary>
/// Changes the camera's actual capture resolution. On Android this sends a
/// resize command to the UVC camera; the PC path is currently a TODO.
/// </summary>
/// <param name="width">Requested width in pixels.</param>
/// <param name="height">Requested height in pixels.</param>
public void Resize(int width, int height)
{
    if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID
    // Send the resize command to the physical camera.
    mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
    // PC TODO: decide how resizing should be handled here.
    // ResizePC(width, height);
#endif
    //mUVCCameraInfo.SetSize(width, height); // NOTE: width/height are the REQUESTED resolution, not necessarily the camera's actual one
    Debug.Log($"[ScreenLocate] 开始修改分辨率 mUVCCameraInfo origin:[{ mUVCCameraInfo.CurrentWidth },{ mUVCCameraInfo.CurrentHeight }]=>target:[{ width },{ height }]");
    // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
}
  675. /// <summary>
  676. /// pc修改分辨率
  677. /// </summary>
  678. /// <param name="width"></param>
  679. /// <param name="height"></param>
  680. public void ResizePC(int width, int height)
  681. {
  682. if (mUVCCameraInfo == null) return;
  683. //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
  684. // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
  685. // if(pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
  686. //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
  687. mUVCCameraInfo.SetSize(width, height); // 手动记录分辨率,这里可能会有问题 width和height是期望的分辨率而不是当前摄像机实际分辨率
  688. Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
  689. }
  690. private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
  691. {
  692. WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
  693. // Stop the current WebCamTexture
  694. _webCamTexture.Stop();
  695. // Trigger OnWebCamStopped event
  696. // OnWebCamStopped?.Invoke();
  697. // Wait for a short time to ensure resources are released
  698. yield return new WaitForSeconds(0.5f);
  699. // Create a new WebCamTexture with the new dimensions
  700. _webCamTexture = new WebCamTexture(newWidth, newHeight);
  701. pcWebCamera.webCamTexture = _webCamTexture;
  702. mUVCTexture = _webCamTexture;
  703. // Restart the camera
  704. yield return StartCoroutine(StartWebCam(pcWebCamera));
  705. }
  706. private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
  707. {
  708. WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
  709. _webCamTexture.Play();
  710. // Wait until the WebCamTexture is playing
  711. while (!_webCamTexture.isPlaying)
  712. {
  713. yield return null;
  714. }
  715. // Trigger OnWebCamStarted event
  716. //OnWebCamStarted?.Invoke();
  717. mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // 手动记录分辨率,这里可能会有问题 width和height是期望的分辨率而不是当前摄像机实际分辨率
  718. Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
  719. // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
  720. }
  721. #endregion
  722. public void BtnScreenMap()
  723. {
  724. ToMode(Mode.ScreenMap);
  725. }
  726. //进入手动定位屏幕
  727. public void BtnScreenLocateManual()
  728. {
  729. ToMode(Mode.ScreenLocateManual);
  730. }
// Resets the screen-identification data (marks the detected screen as inactive).
public void ResetScreenIdentification()
{
screenIdentification.Screen.Active = false;
}
/// <summary>
/// Screen corner points in a fixed vertex order: bottom-left, bottom-right, top-left, top-right.
/// </summary>
public static List<Vector2> quadUnityVectorList = new();
  740. /// <summary>
  741. /// 打印信息
  742. /// </summary>
  743. /// <param name="list">左下,右下,左上,右上</param>
  744. /// <returns></returns>
  745. public string PrintVector2List(List<Vector2> list)
  746. {
  747. if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
  748. string result = "";
  749. if (list.Count == 4)
  750. {
  751. result = "左下" + list[0].ToString() + ",右下" + list[1].ToString() + ",左上" + list[2].ToString() + ",右上" + list[3].ToString();
  752. }
  753. else
  754. {
  755. result = "count != 4 error";
  756. }
  757. //foreach (Vector2 vector in list)
  758. //{
  759. // result += vector.ToString() + " ";
  760. //}
  761. //Debug.Log(result);
  762. return result;
  763. }
  764. /// <summary>
  765. /// 判断是否存在NaN
  766. /// </summary>
  767. /// <param name="vectors"></param>
  768. /// <returns></returns>
  769. public bool ContainsNaN(List<Vector2> vectors)
  770. {
  771. foreach (var v in vectors)
  772. {
  773. if (float.IsNaN(v.x) || float.IsNaN(v.y))
  774. {
  775. return true;
  776. }
  777. }
  778. return false;
  779. }
  780. // 标记屏幕的四个角, ScreenQuadObject 下挂了4个子节点用于标记
  781. public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
  782. {
  783. if (screen == null)
  784. {
  785. Info.text = "识别屏幕失败";
  786. return;
  787. }
  788. Info.text = "已识别到屏幕";
  789. if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
  790. {
  791. ScreenQuadObject.gameObject.SetActive(true);
  792. for (int i = 0; i < 4; i++)
  793. {
  794. if (DebugOnZIMDemo)
  795. {
  796. RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
  797. t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
  798. }
  799. }
  800. }
  801. quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // 记录四个点
  802. if (!ContainsNaN(quadUnityVectorList))
  803. {
  804. SaveScreenLocateVectorList();
  805. //SyncInfraredDemo();
  806. if (DebugOnZIMDemo)
  807. SyncInfraredScreenPositioningView();
  808. InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
  809. Debug.Log("[ScreenLocate] ShowScreen 已识别到屏幕,更新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
  810. }
  811. else
  812. {
  813. Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
  814. }
  815. }
/// <summary>Convenience overload: marks the corners on the default <c>ScreenQuad</c> container.</summary>
public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);
  817. /// <summary>
  818. /// 校准点位置存储到本地
  819. /// </summary>
  820. static public void SaveScreenLocateVectorList()
  821. {
  822. string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
  823. Debug.Log("SaveScreenLocateVectorList: " + saveStr);
  824. PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
  825. }
  826. /// <summary>
  827. /// 获取本地存储校准点位置
  828. /// </summary>
  829. static public bool GetScreenLocateVectorList()
  830. {
  831. string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
  832. Debug.Log("GetScreenLocateVectorList:"+ posListStr);
  833. if (!string.IsNullOrWhiteSpace(posListStr))
  834. {
  835. quadUnityVectorList.Clear();
  836. quadUnityVectorList = posListStr.Split(';')
  837. .Select(s =>
  838. {
  839. string[] parts = s.Split(',');
  840. return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
  841. })
  842. .ToList();
  843. return true;
  844. }
  845. else return false;
  846. }
  847. public Vector2 AdjustPointsOffset(Vector2 inputPoint,string type = "CameraLocation")
  848. {
  849. // 计算从原始中心到输入点的偏移量
  850. if (type == "CameraLocation")
  851. {
  852. CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
  853. return CameraLocationOffset;
  854. }
  855. else {
  856. //ScreenUV
  857. UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
  858. return UVOffset;
  859. }
  860. }
  861. /// <summary>
  862. /// 这里计算一个偏移后的cameraLocatoin位置
  863. /// </summary>
  864. /// <param name="cameraLocatoin"></param>
  865. /// <returns></returns>
  866. public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin) {
  867. return cameraLocatoin - CameraLocationOffset;
  868. }
/// <summary>
/// State machine for switching UI modes. Each branch updates the info text,
/// toggles the relevant UI objects, and only then commits <c>this.mode</c>.
/// Statement order matters (e.g. ScreenMap refuses to enter until a screen
/// has been located), so the branches must not be reordered.
/// </summary>
/// <param name="mode">Target mode; a no-op when already in that mode.</param>
void ToMode(Mode mode)
{
if (this.mode == mode)
return;
if (mode == Mode.ScreenMap)
{
// Screen mapping requires a previously located screen.
if (!screenIdentification.Screen.Active)
{
Info.text = "先定位屏幕";
return;
}
Info.text = "按ESC退出";
SetScreen(Color.black);
//Info.transform.SetAsLastSibling();
this.mode = Mode.ScreenMap;
}
else if (mode == Mode.InfraredLocate)
{
Info.text = screenIdentification.Screen.Active ? "已定位屏幕" : "定位屏幕失败";
//Info.text = "已识别到屏幕";
SetScreen(null);
// Hide the calibration UI used by the other modes.
foreach (var i in CrosshairInScreen)
i.gameObject.SetActive(false);
FullScreenImage.gameObject.SetActive(false);
ScreenPixelCheaker.HideImage();
//Info.transform.SetSiblingIndex(transform.childCount - 4);
this.mode = Mode.InfraredLocate;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Console.WriteLine($"{TAG} Mode.InfraredLocate:已识别到屏幕:{screenIdentification.Screen.Active}");
#endif
}
else if (mode == Mode.ScreenLocateManual)
{
Info.text = "左键单击屏幕 左下角";
FullScreenImage.gameObject.SetActive(true);
ScreenPixelCheaker.ShowImage();
//Info.transform.SetSiblingIndex(transform.childCount - 1);
// var newTex = WebCamera.webCamTexture.AutoLight(10);
//DebugTexture(1, TextureToTexture2D(rawImage.texture));
// Snapshot the current UVC frame and show a brightness-boosted copy for manual calibration.
CreateUVCTexture2DIfNeeded();
DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
//mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
//DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
this.mode = Mode.ScreenLocateManual;
}
}
  915. private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
  916. {
  917. if (width == 0)
  918. width = texture.width;
  919. if (height == 0)
  920. height = texture.height;
  921. Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
  922. RenderTexture currentRT = RenderTexture.active;
  923. RenderTexture renderTexture = RenderTexture.GetTemporary(
  924. width,
  925. height,
  926. 0,
  927. RenderTextureFormat.ARGB32,
  928. RenderTextureReadWrite.Linear);
  929. Graphics.Blit(texture, renderTexture);
  930. RenderTexture.active = renderTexture;
  931. _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
  932. _texture2D.Apply();
  933. RenderTexture.active = currentRT;
  934. RenderTexture.ReleaseTemporary(renderTexture);
  935. return _texture2D;
  936. }
  937. //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
  938. //{
  939. // if (mUVCTexture2D != null)
  940. // Destroy(mUVCTexture2D);
  941. // mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
  942. //}
  943. /// <summary>
  944. /// 使用默认的mUVCTexture宽高
  945. /// </summary>
  946. private void CreateUVCTexture2DIfNeeded()
  947. {
  948. if (mUVCTexture2D != null)
  949. Destroy(mUVCTexture2D);
  950. mUVCTexture2D = TextureToTexture2D(mUVCTexture);
  951. }
/// <summary>
/// Rebuilds mUVCTexture2D from the current UVC texture at the given size.
/// </summary>
/// <param name="width">Target width; 0 falls back to the source texture width.</param>
/// <param name="height">Target height; 0 falls back to the source texture height.</param>
private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
{
// Release the previous snapshot to avoid leaking GPU memory.
if (mUVCTexture2D != null)
Destroy(mUVCTexture2D);
// NOTE(review): a bare CreateUVCTexture2DIfNeeded() call binds to the
// parameterless overload, not to these defaults — the defaults only apply
// when at least one argument is supplied.
mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
}
#region DoubleButton
// Timestamps of the first and second presses, used for double-click detection.
// default(DateTime) means "no press recorded yet".
private DateTime m_firstTime;
private DateTime m_secondTime;
// Double-click confirmed: enter manual screen locating and reset the click timer.
private void Press()
{
Debug.Log("进入手动定位");
BtnScreenLocateManual();
resetTime();
}
  972. public void OnDoubleClick()
  973. {
  974. //超时重置
  975. if (!m_firstTime.Equals(default(DateTime)))
  976. {
  977. var intervalTime = DateTime.Now - m_firstTime;
  978. float milliSeconds = intervalTime.Seconds * 1000 + intervalTime.Milliseconds;
  979. if (milliSeconds >= 400)
  980. resetTime();
  981. }
  982. // 按下按钮时对两次的时间进行记录
  983. if (m_firstTime.Equals(default(DateTime)))
  984. m_firstTime = DateTime.Now;
  985. else
  986. m_secondTime = DateTime.Now;
  987. // 在第二次点击触发,时差小于400ms触发
  988. if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
  989. {
  990. var intervalTime = m_secondTime - m_firstTime;
  991. float milliSeconds = intervalTime.Seconds * 1000 + intervalTime.Milliseconds;
  992. if (milliSeconds < 400)
  993. Press();
  994. else
  995. resetTime();
  996. }
  997. }
  998. private void resetTime()
  999. {
  1000. m_firstTime = default(DateTime);
  1001. m_secondTime = default(DateTime);
  1002. }
  1003. #endregion
  1004. #region 性能检测相关
// Resets the frame-timing statistics so stale values are not reported.
void InvalidateTimings()
{
m_ValidHistoryFrames = 0;
m_AverageTime = float.NaN;
m_MedianTime = float.NaN;
m_MinTime = float.NaN;
m_MaxTime = float.NaN;
}
  1013. void UpdateInputs()
  1014. {
  1015. //重置
  1016. if (Input.GetKeyDown(KeyCode.UpArrow))
  1017. {
  1018. InvalidateTimings();
  1019. }
  1020. }
  1021. #endregion
  1022. }