ScreenLocate.cs 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175
  1. #define ENABLE_LOG
  2. using InfraredManager;
  3. using o0;
  4. using SLAMUVC;
  5. using System;
  6. using System.Collections;
  7. using System.Collections.Generic;
  8. using System.Linq;
  9. using UnityEngine;
  10. using UnityEngine.Experimental.AI;
  11. using UnityEngine.UI;
  12. using ZIM;
  13. using ZIM.Unity;
  14. using static SLAMUVC.UVCManager;
  15. using Color = UnityEngine.Color;
  16. using Time = UnityEngine.Time;
  17. [RequireComponent(typeof(Canvas))]
  18. public partial class ScreenLocate : MonoBehaviour
  19. {
public InfraredCameraHelper InfraredCameraHelper;
private const string TAG = "ScreenLocate#";
/// <summary>Number of infrared light spots to track.</summary>
public enum InfraredCount : int
{
    Single = 1,
    Double = 2
}
// Operating mode of this component.
enum Mode
{
    InfraredLocate,     // normal infrared spot tracking
    ScreenMap,          // map detected spots onto the located screen
    ScreenLocateManual  // manual screen-corner calibration
}
// Host platform; selects debug-crosshair rendering paths, set by WebCamIsReady/UVCIsReady.
enum Platform
{
    Window,
    Android
}
Platform mPlatform = Platform.Android;
// Two lights, ordered by infrared spot size from large to small; coordinates are obtained
// via InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
public InfraredSpot[] InfraredSpots
{
    get
    {
        // NOTE(review): getter has a side effect — switches detection to double-spot mode.
        infraredCount = InfraredCount.Double;
        return infraredSpotBuffer;
    }
}
// One light; coordinates are obtained via InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
public InfraredSpot InfraredSpotSingle
{
    get
    {
        // NOTE(review): getter side effect — switches detection to single-spot mode.
        infraredCount = InfraredCount.Single;
        return infraredSpotBuffer[0];
    }
}
public InfraredSpot[] infraredSpotBuffer;
  58. public string GetInfraredCount() { return infraredCount.ToString(); }
/// <summary>
/// Offset applied to CameraLocation.
/// </summary>
public Vector2 CameraLocationOffset { get; set; } = new Vector2(0, 0);
// Offset subtracted from screen-UV coordinates before they are scaled to screen pixels.
public Vector2 UVOffset { get; set; } = new Vector2(0, 0);
// public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();
#region UVC 处理的对象
//public UVCManager mUVCManager;
public CameraInfo mUVCCameraInfo;
// True once a UVC camera descriptor is available.
public bool getUVCCameraInfo => mUVCCameraInfo != null ? true : false;
// Camera resolution; falls back to 320x240 before the camera is ready.
public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
private Texture mUVCTexture;
public Texture getUVCTexture => mUVCTexture;
public Texture setUVCTexture {
    set {
        mUVCTexture = value;
    }
}
private Texture2D mUVCTexture2D;
// [SerializeField] Texture2DArray mUVCOutArray;
#endregion
public Text Info;
public List<RectTransform> CrosshairInCamera;
public List<RectTransform> CrosshairInScreen;
public RectTransform ScreenQuad;
public Toggle SaveToggle;
public Toggle FullScreenToggle;
public LineGenerator UILineGenerator;
public bool ShowScreenQuad = false;
// Output images:
// image 0 is the raw camera frame, 1 all candidate screen-edge segments, 2 the identified
// screen picture, 3 the four identified screen edges, 4 the overlay of 2 and 3,
// 5 the three algorithms' segments in different colours.
public List<RawImage> outputRawImages;
[NonSerialized] public Texture[] outputTexture2D = new Texture[8];
public RawImage FullScreenImage;
public PixelCheaker ScreenPixelCheaker;
public InfraredSpotSettings InfraredSpotSettings;
public o0.Geometry2D.Vector<int> CameraSize { get; set; }
public List<Texture2D> DebugScreenImages = new List<Texture2D>();
public bool DebugOnZIMDemo = false;
// private SynchronizationContext mainContext;
// Whether to display/track a single point.
public bool bSinglePoint = true; // single-point detection by default
[NonSerialized] public float ReDoLocateCalibrationRatio = 0.04f; // calibration distance ratio when re-locating, e.g. manual locate first, then automatic — the manual result calibrates the automatic one
[NonSerialized] public InfraredCount infraredCount = InfraredCount.Single;
bool bIdentifyRed = true; // device colour defaults to red
bool bIdentifyGreen = true;
#region 性能检测相关
public Text m_UITime;
const float m_UIUpdateInterval = 0.1f;
float m_UIUpdateTimer = 0.0f;
List<float> m_History = new List<float>(100);  // frame-time ring buffer (see Update)
int m_ValidHistoryFrames = 0;
float m_AverageTime = float.NaN;
float m_MedianTime = float.NaN;
float m_MinTime = float.NaN;
float m_MaxTime = float.NaN;
public float updateInterval = 0.5F;
private double lastInterval;
private int frames = 0;
private float fps;
public Text m_FPS;
#endregion
#region PC部分参数
// Brightness
public float pcBrightness { get; set; } = 0.0f;
// Contrast
public float pcContrast { get; set; } = 0.0f;
#endregion
InfraredLocate infraredLocate;
RectTransform canvas;
Mode mode;
List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
//o0.Project.WebCam o0WebCam = null;
o0.Project.ScreenIdentification screenIdentification;
public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;
/// <summary>
/// Recognition-in-progress state, recorded during automatic recognition.
/// </summary>
bool bAutomaticRecognition { get; set; } = false;      // capture in progress
bool bAutomaticRecognitionStart { get; set; } = false; // whether to start capturing
bool bAutomaticRecognitionEnd { get; set; } = false;   // whether to end capturing
[NonSerialized] public RectTransform BackQuad = null;
// Singleton-style handle to the active instance (assigned in Awake).
static public ScreenLocate Main;
// Applies a simple auto-brightness pass to the raw camera pixels, publishes the result
// to debug slot 7 and mirrors it on the full-screen preview image.
static public void AutoLightPixels(Color[] pixels, int width, int height)
{
    var newTex = pixels.zimAutoLightSimple(width, height);
    DebugTexture(7, newTex);
    try
    {
        Main.FullScreenImage.texture = newTex;
    }
    catch { } // best effort: FullScreenImage may be unassigned or destroyed
}
// Stores a debug texture in the given slot (destroying the slot's previous texture at
// end of frame) and shows it on the matching debug RawImage when one is wired up.
static public void DebugTexture(int index, Texture texture)
{
    LateDestory(Main.outputTexture2D[index]);
    Main.outputTexture2D[index] = texture;
    try
    {
        Main.outputRawImages[index].texture = texture;
    }
    catch { } // best effort: the RawImage list may be shorter than the texture array
}
// Schedules destruction of a UnityEngine.Object at the end of the current frame.
static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));
  163. static public void SetScreen(UnityEngine.Color? color = null)
  164. {
  165. if (Main.BackQuad == null)
  166. {
  167. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  168. var background = canvas.Find("Background");
  169. Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  170. }
  171. Main.BackQuad.parent.gameObject.SetActive(color != null);
  172. Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  173. //Debug.Log("Set Screen " + color.GetColorName());
  174. }
  175. static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
  176. {
  177. if (Main.BackQuad == null)
  178. {
  179. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  180. var background = canvas.Find("Background");
  181. Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  182. }
  183. Main.BackQuad.parent.gameObject.SetActive(color != null);
  184. Main.BackQuad.anchorMin = rect.min;
  185. Main.BackQuad.anchorMax = rect.max;
  186. Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  187. //Debug.Log("Set Screen " + color.GetColorName());
  188. }
// Debug helper: toggles the background quad's visibility (forcing it white and hiding the
// parent image) and optionally repositions it to the given normalized rect.
static void DebugBackQuad(Rect? rect = null)
{
    if (Main.BackQuad)
    {
        Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
        Main.BackQuad.GetComponent<RawImage>().color = Color.white;
        Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
        if (rect.HasValue)
        {
            Main.BackQuad.anchorMin = rect.Value.min;
            Main.BackQuad.anchorMax = rect.Value.max;
        }
    }
}
  203. public void ReSizeTexture(int width, int height)
  204. {
  205. Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
  206. return;
  207. if (mUVCTexture.width < width || mUVCTexture.height < height) // 如果当前分辨率太小,则重新new一个texture
  208. {
  209. Texture2D tex = new Texture2D(
  210. width, height,
  211. TextureFormat.ARGB32,
  212. false, /* mipmap */
  213. true /* linear */);
  214. tex.filterMode = FilterMode.Point;
  215. tex.Apply();
  216. mUVCTexture = tex;
  217. mUVCCameraInfo.previewTexture = tex;
  218. var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
  219. }
  220. }
void Awake()
{
    // Publish the singleton handle used by the static helpers.
    Main = this;
#if !UNITY_EDITOR_WIN
    // The ZIM debug view is only available in the Windows editor.
    DebugOnZIMDemo = false;
#endif
    //if (mUVCDrawer)
    //    mUVCDrawer.StartPreviewAction += UVCIsReady;
}
void OnDestroy()
{
    //if (mUVCDrawer)
    //    mUVCDrawer.StartPreviewAction -= UVCIsReady;
}
void Start()
{
    //mainContext = SynchronizationContext.Current;
    // Target frame rate: 60.
    Application.targetFrameRate = 60;
    canvas = transform.GetComponent<RectTransform>();
    mode = Mode.InfraredLocate;
    // Debug path: run screen location against the provided debug images.
    if (DebugScreenImages.Count != 0 && DebugOnZIMDemo)
    {
        screenIdentification = new o0.Project.ScreenIdentification();
        screenIdentification.LocateScreen();
    }
    if (DebugOnZIMDemo) {
        FullScreenToggle.onValueChanged.AddListener((i) =>
        {
            Screen.fullScreen = i;
        });
    }
    infraredCount = InfraredCount.Single;
    #region 性能检测相关
    // Pre-fill the frame-time history ring buffer to its capacity.
    for (var i = 0; i < m_History.Capacity; ++i)
    {
        m_History.Add(0.0f);
    }
    lastInterval = Time.realtimeSinceStartup;
    frames = 0;
    #endregion
}
  263. IEnumerator LateDestoryIEnum(UnityEngine.Object o)
  264. {
  265. if (o)
  266. {
  267. yield return new WaitForEndOfFrame();
  268. Destroy(o);
  269. }
  270. }
// Used by the ZIMWebCamera scene (PC path).
public void WebCamIsReady(Texture texture)
{
    mPlatform = Platform.Window;
    mUVCTexture = texture;
    mUVCCameraInfo = new CameraInfo(mUVCTexture);
    brightness = 0;
    // UVC is ready.
    InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
}
/// <summary>
/// Called when the UVCManager has been created and initialized (Android path).
/// </summary>
/// <param name="cameraInfo">Camera descriptor supplied by the UVC layer.</param>
public void UVCIsReady(CameraInfo cameraInfo)
{
    mPlatform = Platform.Android;
    mUVCTexture = cameraInfo.previewTexture;
    mUVCCameraInfo = cameraInfo;
    Debug.Log("UVCIsReady:" + mUVCCameraInfo);
    // UVC is ready.
    InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
}
/// <summary>
/// Fetches the new previewTexture after a camera (re)configuration and drives the
/// automatic-recognition start/end handshake flagged by OnLocateScreenEnter/End.
/// </summary>
public void UVCUpdate(bool bChange)
{
    mUVCTexture = mUVCCameraInfo.previewTexture;
    Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:"+bChange);
    InfraredCameraHelper?.InvokeOnUVCIsUpdate();
    // Decide here whether to enter automatic recognition.
    if (bAutomaticRecognitionStart) {
        bAutomaticRecognitionStart = false;
        Debug.Log("[ScreenLocate] UVCUpdate 开始自动识别 Capture:" + Capture + " ,Delay: " + Delay);
        screenIdentification.LocateScreen(Capture, Delay);
    }
    if (bAutomaticRecognitionEnd) {
        bAutomaticRecognitionEnd = false;
        Debug.Log("[ScreenLocate] UVCUpdate 结束捕获,当前摄像机分辨率为: " + mUVCCameraInfo.Size);
        bAutomaticRecognition = false;
    }
}
// Brightness bookkeeping; reset to 0 when a PC webcam attaches (see WebCamIsReady).
int brightness = 0;
/// <summary>
/// Sets the infrared-spot brightness threshold used by the detection algorithm.
/// </summary>
/// <param name="value">Threshold in [0,1]; lower values detect dimmer spots. Default is 0.93.</param>
public void SetInfraredLocateBrightnessThreshold(float value)
{
    if (infraredLocate != null)
    {
        if (value >= 0 && value <= 1)
            infraredLocate.SetBrightnessThreshold(value); // lower threshold ⇒ dimmer spots detected; default 0.93
    }
}
/// <summary>
/// Per-frame driver: lazily creates the screen-identification and infrared-locate helpers,
/// keeps the camera resolution in sync, runs spot detection on the current frame and
/// dispatches the detected positions according to the current Mode.
/// </summary>
void Update()
{
    //++frames;
    //float timeNow = Time.realtimeSinceStartup;
    //if (timeNow > lastInterval + updateInterval)
    //{
    //    fps = (float)(frames / (timeNow - lastInterval));
    //    frames = 0;
    //    lastInterval = timeNow;
    //}
    //if (m_FPS != null)
    //    m_FPS.text = "FPS:" + fps.ToString("f2");
    if (mUVCCameraInfo == null) return; // no camera attached yet
    if (screenIdentification == null)
    {
        screenIdentification = new o0.Project.ScreenIdentification();
        // PC no longer switches resolution.
#if UNITY_ANDROID
        //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
        screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
#endif
    }
    if (infraredLocate == null)
    {
        infraredLocate = new InfraredLocate(mUVCCameraInfo, screenIdentification, InfraredSpotSettings, ScreenPixelCheaker);
        // InfraredDemo initialization
        //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
        //Debug.Log("Init Red filterValue:" + redfilterValue);
        //infraredLocate.SetBrightnessThreshold(redfilterValue); // brightness threshold; lower detects dimmer spots, default 0.93
    }
    if (screenIdentification.Screen.RefreshCameraSize(getUVCCameraInfoSize)) // sync resolution; a change must also be propagated to InfraredDemo
    {
        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            //SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变:[" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变,刷新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
        }
        else {
            Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
        }
        if (DebugOnZIMDemo)
            Main.ShowScreen(Main.ScreenQuad, screenIdentification.Screen.QuadInCamera);
    }
    //var t0 = Time.realtimeSinceStartup;
    /* New*/
    //Debug.Log((mUVCCameraInfo != null) +" = "+ mUVCCameraInfo.IsPreviewing + " = "+ screenIdentification.Screen.Active);
    if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing) // run infrared detection only once the screen has been located
    {
        //if (bAutomaticRecognition)
        //{
        //    //识别的过程使用的分辨率
        //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        //    if (log1)
        //    {
        //        log1 = false;
        //        Debug.Log("[ScreenLocate] log1:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
        //    }
        //}
        //else
        //{
        //    //自动识别完成后使用相机分辨率大小 getUVCCameraInfoSize
        //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        //    if (log2)
        //    {
        //        log2 = false;
        //        Debug.Log("[ScreenLocate] log2:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
        //    }
        //}
        // Skip the detection algorithm when a Bluetooth device is connected and running 9-axis.
        // NOTE(review): the original comment said "and NOT a 9-axis device", but the code
        // returns when bRuning9Axis() is true — comment and code disagree; verify intent.
        if (BluetoothAim.ins?.status == BluetoothStatusEnum.ConnectSuccess && AimHandler.ins && AimHandler.ins.bRuning9Axis()) return;
        // Render at the getUVCCameraInfoSize resolution.
        CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        if (!screenIdentification.Update(mUVCTexture2D))
        {
            CameraSize = new o0.Geometry2D.Vector<int>(mUVCTexture2D.width, mUVCTexture2D.height);
            var pixels = mUVCTexture2D.GetPixels(); // left-to-right, bottom-to-top
            AutoLightPixels(pixels, CameraSize.x, CameraSize.y);
            //return;
            //InfraredSpots = infraredLocate.Update(pixels);
            if (bSinglePoint)
                infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
            else
                infraredSpotBuffer = infraredLocate.Update(pixels);
            if (mode == Mode.ScreenLocateManual)
            {
                for (int i = 0; i < infraredSpotBuffer.Length; i++)
                {
                    if (infraredSpotBuffer[i].CameraLocation != null)
                    {
                        // Spot detected.
                        var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
                        CrosshairInCamera[i].gameObject.SetActive(true);
                        CrosshairInCamera[i].anchoredPosition = posInCanvas;
                    }
                    else
                        CrosshairInCamera[i].gameObject.SetActive(false);
                }
            }
            else if(mode == Mode.InfraredLocate)
            {
                if (mPlatform == Platform.Window) // render the UI dots; can be hidden once in-game
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Spot detected.
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                // Mobile used to gate on: mPlatform == Platform.Android &&
                // Common path for both mobile and PC.
                if (infraredSpotBuffer.Length > 0)
                {
                    int redIndex = 0;
                    int greenIndex = 1;
                    // NOTE(review): branches below index [greenIndex] with only a Length > 0
                    // guard — presumably the buffer always holds two entries in double mode;
                    // confirm against InfraredLocate.Update.
                    // Only show the first point (if the largest point goes out of bounds it flickers).
                    if (bSinglePoint)
                    {
                        redIndex = 0; // in single-point mode the colour can be switched
                        if (infraredSpotBuffer[redIndex].ScreenUV != null)
                        {
                            string str = "Single:";
                            Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                            onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                        }
                    }
                    else
                    {
                        // In double-point mode pick the first point.
                        if (bIdentifyRed && !bIdentifyGreen)
                        {
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                            }
                            else
                            {
                                Info.text = "未检测到红色最大点!";
                            }
                        }
                        else if (!bIdentifyRed && bIdentifyGreen)
                        {
                            if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                            {
                                Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                            }
                            else
                            {
                                Info.text = "未检测到绿色点!";
                            }
                        }
                        else
                        {
                            // Both unselected or both selected: detect both points.
                            // Automatic switch — spot detected.
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                            }
                            else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                            {
                                Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                            }
                            else
                            {
                                Info.text = "未检测到点!";
                            }
                        }
                    }
                }
            }
            else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
            {
                for (int i = 0; i < infraredSpotBuffer.Length; i++)
                {
                    if (infraredSpotBuffer[i].ScreenUV != null)
                    {
                        // Spot detected.
                        var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                        CrosshairInScreen[i].gameObject.SetActive(true);
                        CrosshairInScreen[i].anchoredPosition = posInCanvas;
                    }
                    else
                        CrosshairInScreen[i].gameObject.SetActive(false);
                }
                if (Input.GetKeyDown(KeyCode.Escape))
                    ToMode(Mode.InfraredLocate);
            }
        }
    }
    //var t1 = Time.realtimeSinceStartup;
    //var dt = t1 - t0;
    //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
    //++m_ValidHistoryFrames;
    //m_UIUpdateTimer += Time.deltaTime;
    //if (m_UIUpdateTimer >= m_UIUpdateInterval)
    //{
    //    m_UIUpdateTimer = 0.0f;
    //    if (m_ValidHistoryFrames >= m_History.Count)
    //    {
    //        m_ValidHistoryFrames = 0;
    //        m_AverageTime = 0.0f;
    //        m_MinTime = float.PositiveInfinity;
    //        m_MaxTime = float.NegativeInfinity;
    //        {
    //            for (var i = 0; i < m_History.Count; i++)
    //            {
    //                var time = m_History[i];
    //                m_AverageTime += time;
    //                m_MinTime = Mathf.Min(m_MinTime, time);
    //                m_MaxTime = Mathf.Max(m_MaxTime, time);
    //            }
    //            m_AverageTime /= m_History.Count;
    //        }
    //        {
    //            m_History.Sort();
    //            // Odd-length history?
    //            if ((m_History.Count & 1) != 0)
    //            {
    //                m_MedianTime = m_History[m_History.Count / 2];
    //            }
    //            else
    //            {
    //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
    //            }
    //        }
    //    }
    //    var statistics = $"{m_History.Count} 帧样本:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
    //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
    //    if (m_UITime != null)
    //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D? ",T2D:" : "")}{(mUVCTexture2D? mUVCTexture2D.width+ "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height:"")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
    //}
    //UpdateInputs();
}
Vector2 targetPos = Vector2.zero; // last position actually forwarded (single-point mode)
Vector2 movePos = Vector2.zero;   // reserved for smoothed movement (currently unused)
int moveSpeed = 20;               // reserved lerp speed (currently unused)
public float filterDis = 3.0f;    // minimum pixel movement before a new position is emitted
// Single-point jitter filter: converts a screen-UV position to screen pixels (applying
// UVOffset so the crosshair points at the true centre) and forwards it only when it has
// moved at least filterDis pixels.
void onFilterPos(Vector2 _vector2Pos)
{
    // Mainly used when simulating the 9-axis mode.
    // An offset is applied so the final crosshair output points at the exact centre.
    Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
    if (Vector2.Distance(np, targetPos) >= filterDis)
    {
        targetPos = np;
        //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(targetPos.x, targetPos.y, 0));
        //Vector2 np = new Vector2(uvCenterOffset.x * Screen.width, uvCenterOffset.y * Screen.height);
        //point -= np;
        InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos);
    }
    //movePos = Vector3.Lerp(movePos, targetPos, Time.deltaTime * moveSpeed);
    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(movePos.x, movePos.y, 0));
}
  601. Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
  602. void onFilterPos2(Vector2 _vector2Pos, int index)
  603. {
  604. Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height);
  605. if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
  606. {
  607. _targetPoints2[index] = np;
  608. InfraredCameraHelper.InvokeOnPositionUpdate2(_targetPoints2[index], index);
  609. }
  610. }
#region 自动识别
int Capture = 30;        // frames to capture during automatic screen location
int Delay = 30;          // frame delay used by automatic screen location
Vector2 EnterResolution; // camera resolution on entry; restored when recognition ends
// int DefaultResolutionIndex;
// readonly public int HighScreenLocateResolutionIndex = 2; // during automatic recognition the camera resolution is fixed at 1280x720 (index 2)
// UI entry point: starts automatic screen location (optionally against the debug images).
public void BtnScreenLocate()
{
    if (DebugScreenImages.Count != 0)
    {
        screenIdentification = new o0.Project.ScreenIdentification();
        CameraSize = new o0.Geometry2D.Vector<int>(DebugScreenImages[0].width, DebugScreenImages[0].height);
        WebCamIsReady(DebugScreenImages[0]);
        CreateUVCTexture2DIfNeeded();
    }
    //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
    //screenIdentification.LocateScreen(Capture, Delay);
    OnLocateScreenEnter();
}
// bool log1 = false, log2 = false;
// Enters automatic recognition: sets the handshake flags, resets the previous result and
// switches the camera to its calibration (highest) resolution.
public void OnLocateScreenEnter()
{
    bAutomaticRecognition = true;
    bAutomaticRecognitionStart = true;
    ResetScreenIdentification();
    //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // remember the pre-entry resolution (game resolution, lower than recognition's)
    //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
    // Debug.Log("[ScreenLocate] 开始捕获 DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
    // InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
    EnterResolution = mUVCCameraInfo.Size; // remember the pre-entry resolution (game resolution, lower than recognition's)
    Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // highest resolution
    Resize((int)_HighResolution.x, (int)_HighResolution.y);
    if (DebugOnZIMDemo)
        screenIdentification.LocateScreen();
    //CreateUVCTexture2DIfNeeded();
    // log1 = true;
    // log2 = true;
}
// Leaves automatic recognition: flags the end handshake and restores the entry resolution.
public void OnLocateScreenEnd()
{
    bAutomaticRecognitionEnd = true;
    // Recognition ran at high resolution; drop back to the lower one recorded on entry.
    //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
    Resize((int)EnterResolution.x, (int)EnterResolution.y);
}
/**
 * Changes the camera's actual resolution (Android sends a resize command; PC path is TODO).
 */
public void Resize(int width, int height)
{
    if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID
    // Send the resize command to the camera.
    mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
    // PC TODO: decide how to handle this.
    // ResizePC(width, height);
#endif
    //mUVCCameraInfo.SetSize(width, height); // manual bookkeeping; may be wrong — width/height are the requested, not the actual, camera resolution
    Debug.Log($"[ScreenLocate] 开始修改分辨率 mUVCCameraInfo origin:[{ mUVCCameraInfo.CurrentWidth },{ mUVCCameraInfo.CurrentHeight }]=>target:[{ width },{ height }]");
    // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
}
/// <summary>
/// PC resolution change: records the size only; the WebCamTexture restart path is disabled.
/// </summary>
/// <param name="width">Requested width in pixels.</param>
/// <param name="height">Requested height in pixels.</param>
public void ResizePC(int width, int height)
{
    if (mUVCCameraInfo == null) return;
    //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
    // if(pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
    //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
    mUVCCameraInfo.SetSize(width, height); // manual bookkeeping; may be wrong — width/height are the requested, not the actual, camera resolution
    Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
}
// Stops the current WebCamTexture, recreates it at the new size, and restarts the camera.
private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
{
    WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
    // Stop the current WebCamTexture
    _webCamTexture.Stop();
    // Trigger OnWebCamStopped event
    // OnWebCamStopped?.Invoke();
    // Wait for a short time to ensure resources are released
    yield return new WaitForSeconds(0.5f);
    // Create a new WebCamTexture with the new dimensions
    _webCamTexture = new WebCamTexture(newWidth, newHeight);
    pcWebCamera.webCamTexture = _webCamTexture;
    mUVCTexture = _webCamTexture;
    // Restart the camera
    yield return StartCoroutine(StartWebCam(pcWebCamera));
}
// Plays the WebCamTexture and records its actual size once it is running.
private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
{
    WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
    _webCamTexture.Play();
    // Wait until the WebCamTexture is playing
    while (!_webCamTexture.isPlaying)
    {
        yield return null;
    }
    // Trigger OnWebCamStarted event
    //OnWebCamStarted?.Invoke();
    mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // manual bookkeeping; may be wrong — see Resize
    Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
    // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
}
#endregion
  721. public void BtnScreenMap()
  722. {
  723. ToMode(Mode.ScreenMap);
  724. }
  725. //进入手动定位屏幕
  726. public void BtnScreenLocateManual()
  727. {
  728. ToMode(Mode.ScreenLocateManual);
  729. }
/// <summary>
/// Resets the screen-identification data by marking the detected screen inactive.
/// </summary>
public void ResetScreenIdentification()
{
    screenIdentification.Screen.Active = false;
}
/// <summary>
/// Normalized screen-corner points in fixed vertex order:
/// bottom-left, bottom-right, top-left, top-right.
/// </summary>
public static List<Vector2> quadUnityVectorList = new();
  739. /// <summary>
  740. /// 打印信息
  741. /// </summary>
  742. /// <param name="list">左下,右下,左上,右上</param>
  743. /// <returns></returns>
  744. public string PrintVector2List(List<Vector2> list)
  745. {
  746. if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
  747. string result = "";
  748. if (list.Count == 4)
  749. {
  750. result = "左下" + list[0].ToString() + ",右下" + list[1].ToString() + ",左上" + list[2].ToString() + ",右上" + list[3].ToString();
  751. }
  752. else
  753. {
  754. result = "count != 4 error";
  755. }
  756. //foreach (Vector2 vector in list)
  757. //{
  758. // result += vector.ToString() + " ";
  759. //}
  760. //Debug.Log(result);
  761. return result;
  762. }
  763. /// <summary>
  764. /// 判断是否存在NaN
  765. /// </summary>
  766. /// <param name="vectors"></param>
  767. /// <returns></returns>
  768. public bool ContainsNaN(List<Vector2> vectors)
  769. {
  770. foreach (var v in vectors)
  771. {
  772. if (float.IsNaN(v.x) || float.IsNaN(v.y))
  773. {
  774. return true;
  775. }
  776. }
  777. return false;
  778. }
/// <summary>
/// Marks the four detected screen corners. ScreenQuadObject carries 4 child
/// markers (one per corner) that are positioned when DebugOnZIMDemo is set; the
/// normalized corner list is then cached, persisted and broadcast to listeners.
/// </summary>
/// <param name="ScreenQuadObject">Rect whose first four children mark the corners.</param>
/// <param name="screen">Detected quadrilateral, or null when detection failed.</param>
public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
{
    if (screen == null)
    {
        Info.text = "识别屏幕失败";
        return;
    }
    Info.text = "已识别到屏幕";
    if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
    {
        ScreenQuadObject.gameObject.SetActive(true);
        for (int i = 0; i < 4; i++)
        {
            if (DebugOnZIMDemo)
            {
                // NOTE(review): assumes every child is a RectTransform; a plain
                // Transform child would make t null and throw below — confirm prefab setup.
                RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
                t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
            }
        }
    }
    quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // cache the four corner points
    if (!ContainsNaN(quadUnityVectorList))
    {
        SaveScreenLocateVectorList();
        //SyncInfraredDemo();
        if (DebugOnZIMDemo)
            SyncInfraredScreenPositioningView();
        InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
        Debug.Log("[ScreenLocate] ShowScreen 已识别到屏幕,更新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
    }
    else
    {
        // NaN corners indicate the camera size changed mid-detection; recalibration is required.
        Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
    }
}
/// <summary>Convenience overload: marks the corners on the default ScreenQuad rect.</summary>
public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);
  816. /// <summary>
  817. /// 校准点位置存储到本地
  818. /// </summary>
  819. static public void SaveScreenLocateVectorList()
  820. {
  821. string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
  822. Debug.Log("SaveScreenLocateVectorList: " + saveStr);
  823. PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
  824. }
  825. /// <summary>
  826. /// 获取本地存储校准点位置
  827. /// </summary>
  828. static public bool GetScreenLocateVectorList()
  829. {
  830. string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
  831. Debug.Log("GetScreenLocateVectorList:"+ posListStr);
  832. if (!string.IsNullOrWhiteSpace(posListStr))
  833. {
  834. quadUnityVectorList.Clear();
  835. quadUnityVectorList = posListStr.Split(';')
  836. .Select(s =>
  837. {
  838. string[] parts = s.Split(',');
  839. return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
  840. })
  841. .ToList();
  842. return true;
  843. }
  844. else return false;
  845. }
/// <summary>
/// Computes and stores the offset from the screen center (UV 0.5,0.5) to the
/// given input point, in either camera space or screen-UV space.
/// </summary>
/// <param name="inputPoint">Measured point.</param>
/// <param name="type">"CameraLocation" stores a camera-space offset in
/// CameraLocationOffset; any other value stores a UV-space offset in UVOffset.</param>
/// <returns>The offset that was just stored.</returns>
public Vector2 AdjustPointsOffset(Vector2 inputPoint,string type = "CameraLocation")
{
    // Offset from the original center point to the input point.
    if (type == "CameraLocation")
    {
        // Screen center (UV 0.5,0.5 scaled by UVSize) projected into camera space.
        CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
        return CameraLocationOffset;
    }
    else {
        //ScreenUV
        UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
        return UVOffset;
    }
}
  860. /// <summary>
  861. /// 这里计算一个偏移后的cameraLocatoin位置
  862. /// </summary>
  863. /// <param name="cameraLocatoin"></param>
  864. /// <returns></returns>
  865. public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin) {
  866. return cameraLocatoin - CameraLocationOffset;
  867. }
/// <summary>
/// Transitions the UI into the requested mode (no-op when already in it), updating
/// the Info label, screen overlay and per-mode widgets as a side effect.
/// </summary>
/// <param name="mode">Target mode: ScreenMap, InfraredLocate or ScreenLocateManual.</param>
void ToMode(Mode mode)
{
    if (this.mode == mode)
        return;
    if (mode == Mode.ScreenMap)
    {
        // Screen-map mode requires a previously located screen.
        if (!screenIdentification.Screen.Active)
        {
            Info.text = "先定位屏幕";
            return;
        }
        Info.text = "按ESC退出";
        SetScreen(Color.black);
        //Info.transform.SetAsLastSibling();
        this.mode = Mode.ScreenMap;
    }
    else if (mode == Mode.InfraredLocate)
    {
        // Report locate success/failure and hide all manual-locate UI.
        Info.text = screenIdentification.Screen.Active ? "已定位屏幕" : "定位屏幕失败";
        //Info.text = "已识别到屏幕";
        SetScreen(null);
        foreach (var i in CrosshairInScreen)
            i.gameObject.SetActive(false);
        FullScreenImage.gameObject.SetActive(false);
        ScreenPixelCheaker.HideImage();
        //Info.transform.SetSiblingIndex(transform.childCount - 4);
        this.mode = Mode.InfraredLocate;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
        Console.WriteLine($"{TAG} Mode.InfraredLocate:已识别到屏幕:{screenIdentification.Screen.Active}");
#endif
    }
    else if (mode == Mode.ScreenLocateManual)
    {
        // Manual locate: show the full-screen image and a brightness-boosted
        // snapshot of the UVC texture for the user to click corners on.
        Info.text = "左键单击屏幕 左下角";
        FullScreenImage.gameObject.SetActive(true);
        ScreenPixelCheaker.ShowImage();
        //Info.transform.SetSiblingIndex(transform.childCount - 1);
        // var newTex = WebCamera.webCamTexture.AutoLight(10);
        //DebugTexture(1, TextureToTexture2D(rawImage.texture));
        CreateUVCTexture2DIfNeeded();
        DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
        //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
        //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
        this.mode = Mode.ScreenLocateManual;
    }
}
/// <summary>
/// Copies any Texture into a readable Texture2D by blitting through a temporary
/// RenderTexture and reading the pixels back.
/// </summary>
/// <param name="texture">Source texture.</param>
/// <param name="width">Output width; 0 means use the source width.</param>
/// <param name="height">Output height; 0 means use the source height.</param>
/// <returns>A new ARGB32 Texture2D with the source pixels; the caller owns it
/// and should Destroy it when no longer needed.</returns>
private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
{
    if (width == 0)
        width = texture.width;
    if (height == 0)
        height = texture.height;
    Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
    // Save the active render target so it can be restored after ReadPixels.
    RenderTexture currentRT = RenderTexture.active;
    RenderTexture renderTexture = RenderTexture.GetTemporary(
    width,
    height,
    0,
    RenderTextureFormat.ARGB32,
    RenderTextureReadWrite.Linear);
    Graphics.Blit(texture, renderTexture);
    RenderTexture.active = renderTexture;
    // ReadPixels reads from RenderTexture.active, so the statement order here matters.
    _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
    _texture2D.Apply();
    RenderTexture.active = currentRT;
    RenderTexture.ReleaseTemporary(renderTexture);
    return _texture2D;
}
  936. //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
  937. //{
  938. // if (mUVCTexture2D != null)
  939. // Destroy(mUVCTexture2D);
  940. // mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
  941. //}
  942. /// <summary>
  943. /// 使用默认的mUVCTexture宽高
  944. /// </summary>
  945. private void CreateUVCTexture2DIfNeeded()
  946. {
  947. if (mUVCTexture2D != null)
  948. Destroy(mUVCTexture2D);
  949. mUVCTexture2D = TextureToTexture2D(mUVCTexture);
  950. }
  951. /// <summary>
  952. /// 根据宽高调整mUVCTexture2D
  953. /// </summary>
  954. /// <param name="width"></param>
  955. /// <param name="height"></param>
  956. private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
  957. {
  958. if (mUVCTexture2D != null)
  959. Destroy(mUVCTexture2D);
  960. mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
  961. }
#region DoubleButton
// Timestamps of the first and second clicks used for double-click detection
// (400 ms window, see OnDoubleClick). default(DateTime) means "not recorded".
private DateTime m_firstTime;
private DateTime m_secondTime;
// Double-click confirmed: enter manual screen-locate mode and clear the click timer.
private void Press()
{
    Debug.Log("进入手动定位");
    BtnScreenLocateManual();
    resetTime();
}
  971. public void OnDoubleClick()
  972. {
  973. //超时重置
  974. if (!m_firstTime.Equals(default(DateTime)))
  975. {
  976. var intervalTime = DateTime.Now - m_firstTime;
  977. float milliSeconds = intervalTime.Seconds * 1000 + intervalTime.Milliseconds;
  978. if (milliSeconds >= 400)
  979. resetTime();
  980. }
  981. // 按下按钮时对两次的时间进行记录
  982. if (m_firstTime.Equals(default(DateTime)))
  983. m_firstTime = DateTime.Now;
  984. else
  985. m_secondTime = DateTime.Now;
  986. // 在第二次点击触发,时差小于400ms触发
  987. if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
  988. {
  989. var intervalTime = m_secondTime - m_firstTime;
  990. float milliSeconds = intervalTime.Seconds * 1000 + intervalTime.Milliseconds;
  991. if (milliSeconds < 400)
  992. Press();
  993. else
  994. resetTime();
  995. }
  996. }
  997. private void resetTime()
  998. {
  999. m_firstTime = default(DateTime);
  1000. m_secondTime = default(DateTime);
  1001. }
  1002. #endregion
#region 性能检测相关
// Invalidates all cached frame-timing statistics (avg/median/min/max become NaN
// and the valid-history counter is zeroed) so a fresh measurement window starts.
void InvalidateTimings()
{
    m_ValidHistoryFrames = 0;
    m_AverageTime = float.NaN;
    m_MedianTime = float.NaN;
    m_MinTime = float.NaN;
    m_MaxTime = float.NaN;
}
  1012. void UpdateInputs()
  1013. {
  1014. //重置
  1015. if (Input.GetKeyDown(KeyCode.UpArrow))
  1016. {
  1017. InvalidateTimings();
  1018. }
  1019. }
  1020. #endregion
  1021. }