// ScreenLocate.cs

#define ENABLE_LOG
using InfraredManager;
using o0;
using SLAMUVC;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.AI;
using UnityEngine.UI;
using ZIM;
using ZIM.Unity;
using static SLAMUVC.UVCManager;
using Color = UnityEngine.Color;
using Time = UnityEngine.Time;
[RequireComponent(typeof(Canvas))]
public partial class ScreenLocate : MonoBehaviour
{
    public InfraredCameraHelper InfraredCameraHelper;
    private const string TAG = "ScreenLocate#";

    public enum InfraredCount : int
    {
        Single = 1,
        Double = 2
    }

    enum Mode
    {
        InfraredLocate,
        ScreenMap,
        ScreenLocateManual
    }

    enum Platform
    {
        Window,
        Android
    }
    Platform mPlatform = Platform.Android;

    public enum ScreenIdentificationTag
    {
        // How the screen is located: manual, semi-automatic or automatic.
        Manual,
        SemiAuto,
        Auto
    }
    // Two spots, ordered by infrared-spot size from largest to smallest; coordinates are read from
    // InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
    public InfraredSpot[] InfraredSpots
    {
        get
        {
            infraredCount = InfraredCount.Double;
            return infraredSpotBuffer;
        }
    }
    // One spot; coordinates are read from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation.
    public InfraredSpot InfraredSpotSingle
    {
        get
        {
            infraredCount = InfraredCount.Single;
            return infraredSpotBuffer[0];
        }
    }
    public InfraredSpot[] infraredSpotBuffer;
    public string GetInfraredCount() { return infraredCount.ToString(); }
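
    // Illustrative usage sketch (assumption: a caller polls these properties once per frame after the screen
    // has been located; "aimCursor" below is a hypothetical RectTransform, not part of this class):
    //
    //     var uv = ScreenLocate.Main.InfraredSpotSingle.ScreenUV; // reading this also switches tracking to single-spot mode
    //     if (uv != null)
    //         aimCursor.anchoredPosition = new Vector2(uv.Value.x * Screen.width, uv.Value.y * Screen.height);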
    /// <summary>
    /// After locating, there can be up to three results (manual, semi-automatic, automatic); pick one of them
    /// as the final recognized screen. Returns false if the selected result is null (no screen was recognized),
    /// otherwise true.
    /// </summary>
    public bool SelectScreenAfterLocate(ScreenIdentificationTag tag) => ScreenIdentification.SelectScreenAfterLocate(tag);
    /// <summary>
    /// Textures output while the algorithm runs: 0 original image, 1 all segments found by semi-automatic
    /// recognition, 2 screen black/white difference, 3 recognition result, 4 screen difference overlaid with
    /// the recognition result, 5 candidate segments during semi-automatic recognition.
    /// </summary>
    public Texture2D[] OutputTextures => outputTexture2D;
    /// <summary>
    /// Offset applied to CameraLocation.
    /// </summary>
    public Vector2 CameraLocationOffset { get; set; } = new Vector2(0, 0);
    public Vector2 UVOffset { get; set; } = new Vector2(0, 0);
    // public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();

    #region UVC objects
    //public UVCManager mUVCManager;
    public CameraInfo mUVCCameraInfo;
    public bool getUVCCameraInfo => mUVCCameraInfo != null;
    public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
    private Texture mUVCTexture;
    public Texture getUVCTexture => mUVCTexture;
    public Texture setUVCTexture
    {
        set
        {
            mUVCTexture = value;
        }
    }
    private Texture2D mUVCTexture2D;
    // [SerializeField] Texture2DArray mUVCOutArray;
    #endregion
    public Text Info;
    public List<RectTransform> CrosshairInCamera;
    public List<RectTransform> CrosshairInScreen;
    public RectTransform ScreenQuad;
    public Toggle SaveToggle;
    public Toggle FullScreenToggle;
    public LineGenerator UILineGenerator;
    public bool ShowScreenQuad = false;
    // RawImages shown on the demo UI.
    public List<RawImage> outputRawImages;
    readonly Texture2D[] outputTexture2D = new Texture2D[8];
    public RawImage FullScreenImage;
    public PixelCheaker ScreenPixelCheaker;
    public InfraredSpotSettings InfraredSpotSettings;
    // Globally records the CameraSize currently used by the algorithms; both infrared tracking and screen
    // identification use it.
    public o0.Geometry2D.Vector<int> CameraSize { get; set; }
    public List<Texture2D> DebugScreenImages = new List<Texture2D>();
    public bool DebugOnZIMDemo = false;
    // private SynchronizationContext mainContext;
    // Whether to track a single spot (single-spot recognition is the default).
    public bool bSinglePoint = true;
    [NonSerialized] public InfraredCount infraredCount = InfraredCount.Single; // Number of infrared spots to recognize: one or two.
    public float ReDoLocateCalibrationRatio { get; private set; } // Distance ratio used to calibrate semi-automatic locating against the manual result; segments too far from the manual result are discarded.
    bool bIdentifyRed = true;   // The device defaults to red.
    bool bIdentifyGreen = true;

    #region Performance measurement
    public Text m_UITime;
    const float m_UIUpdateInterval = 0.1f;
    float m_UIUpdateTimer = 0.0f;
    List<float> m_History = new List<float>(100);
    int m_ValidHistoryFrames = 0;
    float m_AverageTime = float.NaN;
    float m_MedianTime = float.NaN;
    float m_MinTime = float.NaN;
    float m_MaxTime = float.NaN;
    public float updateInterval = 0.5F;
    private double lastInterval;
    private int frames = 0;
    private float fps;
    public Text m_FPS;
    #endregion

    #region PC-only parameters
    // Brightness
    public float pcBrightness { get; set; } = 0.0f;
    // Contrast
    public float pcContrast { get; set; } = 0.0f;
    #endregion
    // Infrared-spot tracking algorithm.
    InfraredLocate infraredLocate;
    // Screen-identification algorithm.
    o0.Project.ScreenIdentification screenIdentification;
    public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;
    RectTransform canvas;
    Mode mode;
    //List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
    //o0.Project.WebCam o0WebCam = null;
    /// <summary>
    /// State flags recorded while automatic recognition is running.
    /// </summary>
    bool bAutomaticRecognition { get; set; } = false;      // Capture in progress.
    bool bAutomaticRecognitionStart { get; set; } = false; // Whether capture should start.
    bool bAutomaticRecognitionEnd { get; set; } = false;   // Whether capture has finished.
    [NonSerialized] public RectTransform BackQuad = null;
    static public ScreenLocate Main;
    static public void AutoLightPixels(Color[] pixels, int width, int height)
    {
        if (Main.DebugOnZIMDemo)
        {
            var newTex = pixels.zimAutoLightSimple(width, height);
            DebugTexture(7, newTex);
            try
            {
                Main.FullScreenImage.texture = newTex;
            }
            catch { }
        }
    }
    static public void DebugTexture(int index, Texture2D texture)
    {
        LateDestory(Main.outputTexture2D[index]);
        Main.outputTexture2D[index] = texture;
        try
        {
            Main.outputRawImages[index].texture = texture;
        }
        catch { }
    }
    static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));
    static public void SetScreen(UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }
    static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.anchorMin = rect.min;
        Main.BackQuad.anchorMax = rect.max;
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }
    static void DebugBackQuad(Rect? rect = null)
    {
        if (Main.BackQuad)
        {
            Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
            Main.BackQuad.GetComponent<RawImage>().color = Color.white;
            Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
            if (rect.HasValue)
            {
                Main.BackQuad.anchorMin = rect.Value.min;
                Main.BackQuad.anchorMax = rect.Value.max;
            }
        }
    }
    //public void ReSizeTexture(int width, int height)
    //{
    //    Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
    //    if (mUVCTexture.width < width || mUVCTexture.height < height) // If the current resolution is too small, allocate a new texture.
    //    {
    //        Texture2D tex = new Texture2D(
    //            width, height,
    //            TextureFormat.ARGB32,
    //            false, /* mipmap */
    //            true /* linear */);
    //        tex.filterMode = FilterMode.Point;
    //        tex.Apply();
    //        mUVCTexture = tex;
    //        mUVCCameraInfo.previewTexture = tex;
    //        var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
    //    }
    //}
    void Awake()
    {
        Main = this;
#if !UNITY_EDITOR_WIN
        DebugOnZIMDemo = false;
#endif
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction += UVCIsReady;
    }
    void OnDestroy()
    {
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction -= UVCIsReady;
    }
    void Start()
    {
        //mainContext = SynchronizationContext.Current;
        canvas = transform.GetComponent<RectTransform>();
        mode = Mode.InfraredLocate;
        if (DebugScreenImages.Count != 0 && DebugOnZIMDemo)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            screenIdentification.LocateScreen();
        }
        infraredCount = InfraredCount.Single;
        ReDoLocateCalibrationRatio = 0.125f;
        #region Performance measurement
        for (var i = 0; i < m_History.Capacity; ++i)
        {
            m_History.Add(0.0f);
        }
        lastInterval = Time.realtimeSinceStartup;
        frames = 0;
        #endregion
    }
    IEnumerator LateDestoryIEnum(UnityEngine.Object o)
    {
        if (o)
        {
            yield return new WaitForEndOfFrame();
            Destroy(o);
        }
    }
    // Used by the ZIMWebCamera scene.
    public void WebCamIsReady(Texture texture)
    {
        mPlatform = Platform.Window;
        mUVCTexture = texture;
        mUVCCameraInfo = new CameraInfo(mUVCTexture);
        brightness = 0;
        // The UVC source is ready.
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }
    /// <summary>
    /// Called when the UVCManager is created and initialized.
    /// </summary>
    /// <param name="cameraInfo"></param>
    public void UVCIsReady(CameraInfo cameraInfo)
    {
        mPlatform = Platform.Android;
        mUVCTexture = cameraInfo.previewTexture;
        mUVCCameraInfo = cameraInfo;
        Debug.Log("UVCIsReady:" + mUVCCameraInfo);
        // The UVC source is ready.
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }
    /// <summary>
    /// Fetch the new previewTexture.
    /// </summary>
    public void UVCUpdate(bool bChange)
    {
        mUVCTexture = mUVCCameraInfo.previewTexture;
        Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ", bChange:" + bChange);
        InfraredCameraHelper?.InvokeOnUVCIsUpdate();
        // Check here whether automatic recognition should start.
        if (bAutomaticRecognitionStart)
        {
            bAutomaticRecognitionStart = false;
            Debug.Log("[ScreenLocate] UVCUpdate starting automatic recognition, Capture:" + Capture + ", Delay: " + Delay);
            screenIdentification.LocateScreen(Capture, Delay);
        }
        if (bAutomaticRecognitionEnd)
        {
            bAutomaticRecognitionEnd = false;
            Debug.Log("[ScreenLocate] UVCUpdate capture finished, current camera resolution: " + mUVCCameraInfo.Size);
            bAutomaticRecognition = false;
        }
    }
    int brightness = 0;
    /// <summary>
    /// Set the brightness threshold used by the infrared-spot algorithm.
    /// </summary>
    /// <param name="value"></param>
    public void SetInfraredLocateBrightnessThreshold(float value)
    {
        if (infraredLocate != null)
        {
            if (value >= 0 && value <= 1)
                infraredLocate.SetBrightnessThreshold(value); // Infrared brightness threshold; the lower the threshold, the dimmer the detectable spots. Default is 0.93.
        }
    }
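
    // Illustrative wiring sketch (assumption: a UnityEngine.UI.Slider named "brightnessSlider" with a 0..1 range
    // exists somewhere in the scene; it is not part of this class):
    //
    //     brightnessSlider.onValueChanged.AddListener(ScreenLocate.Main.SetInfraredLocateBrightnessThreshold);
    //
    // Values outside [0, 1] are ignored by the setter above, so the slider range can be clamped on the UI side.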
    void Update()
    {
        //++frames;
        //float timeNow = Time.realtimeSinceStartup;
        //if (timeNow > lastInterval + updateInterval)
        //{
        //    fps = (float)(frames / (timeNow - lastInterval));
        //    frames = 0;
        //    lastInterval = timeNow;
        //}
        //if (m_FPS != null)
        //    m_FPS.text = "FPS:" + fps.ToString("f2");
        if (mUVCCameraInfo == null) return;
        if (screenIdentification == null)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            // The PC build no longer switches resolution.
#if UNITY_ANDROID
            //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
            screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
#endif
        }
        if (infraredLocate == null)
        {
            infraredLocate = new InfraredLocate(mUVCCameraInfo, screenIdentification, InfraredSpotSettings, ScreenPixelCheaker);
            // InfraredDemo initialization
            //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
            //Debug.Log("Init Red filterValue:" + redfilterValue);
            //infraredLocate.SetBrightnessThreshold(redfilterValue); // Infrared brightness threshold; the lower the threshold, the dimmer the detectable spots. Default is 0.93.
        }
        if (screenIdentification.Screen.RefreshCameraSize(getUVCCameraInfoSize)) // Sync the resolution; after it changes it must also be synced to InfraredDemo.
        {
            quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
            if (!ContainsNaN(quadUnityVectorList))
            {
                SaveScreenLocateVectorList();
                //SyncInfraredDemo();
                //SyncInfraredScreenPositioningView();
                InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
                Debug.Log("[ScreenLocate] RefreshCameraSize camera size changed: [" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
                Debug.Log("[ScreenLocate] RefreshCameraSize camera size changed, refreshed quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
            }
            else
            {
                Debug.LogError("[ScreenLocate] RefreshCameraSize camera size changed, NaN values present, recalibration needed: " + PrintVector2List(quadUnityVectorList));
            }
            if (DebugOnZIMDemo)
                Main.ShowScreen(screenIdentification.Screen.QuadInCamera);
        }
        //var t0 = Time.realtimeSinceStartup;
        /* New */
        //Debug.Log((mUVCCameraInfo != null) + " = " + mUVCCameraInfo.IsPreviewing + " = " + screenIdentification.Screen.Active);
        if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing) // Only run infrared tracking after the screen has been located successfully.
        {
            //if (bAutomaticRecognition)
            //{
            //    // Resolution used while recognition is running.
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log1)
            //    {
            //        log1 = false;
            //        Debug.Log("[ScreenLocate] log1:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            //else
            //{
            //    // After automatic recognition finishes, use the camera resolution getUVCCameraInfoSize.
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log2)
            //    {
            //        log2 = false;
            //        Debug.Log("[ScreenLocate] log2:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            // If a Bluetooth device is connected and it is not a 9-axis device, skip the recognition algorithms.
            if (BluetoothAim.ins?.status == BluetoothStatusEnum.ConnectSuccess && AimHandler.ins && AimHandler.ins.bRuning9Axis()) return;
            // Render at the getUVCCameraInfoSize resolution.
            CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            if (!screenIdentification.Update(mUVCTexture2D))
            {
                CameraSize = new o0.Geometry2D.Vector<int>(mUVCTexture2D.width, mUVCTexture2D.height);
                var pixels = mUVCTexture2D.GetPixels(); // Left to right, bottom to top.
                AutoLightPixels(pixels, CameraSize.x, CameraSize.y);
                //return;
                //InfraredSpots = infraredLocate.Update(pixels);
                if (bSinglePoint)
                    infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
                else
                    infraredSpotBuffer = infraredLocate.Update(pixels);
                if (mode == Mode.ScreenLocateManual)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Spot detected.
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                else if (mode == Mode.InfraredLocate)
                {
                    if (mPlatform == Platform.Window) // Draw the UI markers; they can be hidden once the game starts.
                    {
                        for (int i = 0; i < infraredSpotBuffer.Length; i++)
                        {
                            if (infraredSpotBuffer[i].CameraLocation != null)
                            {
                                // Spot detected.
                                var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
                                CrosshairInCamera[i].gameObject.SetActive(true);
                                CrosshairInCamera[i].anchoredPosition = posInCanvas;
                            }
                            else
                                CrosshairInCamera[i].gameObject.SetActive(false);
                        }
                    }
                    // Used on mobile: mPlatform == Platform.Android &&
                    // Shared path for mobile and PC.
                    if (infraredSpotBuffer.Length > 0)
                    {
                        int redIndex = 0;
                        int greenIndex = 1;
                        // Only the first spot is shown (if the largest spot leaves the frame it will flicker).
                        if (bSinglePoint)
                        {
                            redIndex = 0; // In single-spot recognition the color can be switched.
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                string str = "Single:";
                                Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                            }
                        }
                        else
                        {
                            // In two-spot mode pick the first spot.
                            if (bIdentifyRed && !bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else
                                {
                                    Info.text = "Largest red spot not detected!";
                                }
                            }
                            else if (!bIdentifyRed && bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "Green spot not detected!";
                                }
                            }
                            else
                            {
                                // With both colors selected or neither selected, track both spots.
                                // Switch automatically to whichever spot is detected.
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "No spot detected!";
                                }
                            }
                        }
                    }
                }
                else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].ScreenUV != null)
                        {
                            // Spot detected.
                            var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                            CrosshairInScreen[i].gameObject.SetActive(true);
                            CrosshairInScreen[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInScreen[i].gameObject.SetActive(false);
                    }
                    if (Input.GetKeyDown(KeyCode.Escape))
                        ToMode(Mode.InfraredLocate);
                }
            }
        }
        //var t1 = Time.realtimeSinceStartup;
        //var dt = t1 - t0;
        //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
        //++m_ValidHistoryFrames;
        //m_UIUpdateTimer += Time.deltaTime;
        //if (m_UIUpdateTimer >= m_UIUpdateInterval)
        //{
        //    m_UIUpdateTimer = 0.0f;
        //    if (m_ValidHistoryFrames >= m_History.Count)
        //    {
        //        m_ValidHistoryFrames = 0;
        //        m_AverageTime = 0.0f;
        //        m_MinTime = float.PositiveInfinity;
        //        m_MaxTime = float.NegativeInfinity;
        //        {
        //            for (var i = 0; i < m_History.Count; i++)
        //            {
        //                var time = m_History[i];
        //                m_AverageTime += time;
        //                m_MinTime = Mathf.Min(m_MinTime, time);
        //                m_MaxTime = Mathf.Max(m_MaxTime, time);
        //            }
        //            m_AverageTime /= m_History.Count;
        //        }
        //        {
        //            m_History.Sort();
        //            // Odd-length history?
        //            if ((m_History.Count & 1) != 0)
        //            {
        //                m_MedianTime = m_History[m_History.Count / 2];
        //            }
        //            else
        //            {
        //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
        //            }
        //        }
        //    }
        //    var statistics = $"{m_History.Count} frame samples:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
        //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
        //    if (m_UITime != null)
        //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D ? ",T2D:" : "")}{(mUVCTexture2D ? mUVCTexture2D.width + "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height : "")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
        //}
        //UpdateInputs();
    }
    Vector2 targetPos = Vector2.zero;
    Vector2 movePos = Vector2.zero;
    int moveSpeed = 20;
    public float filterDis = 3.0f;
    void onFilterPos(Vector2 _vector2Pos)
    {
        // Mainly used when simulating the 9-axis device.
        // Apply an offset so that the final crosshair output points at the exact center.
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
        if (Vector2.Distance(np, targetPos) >= filterDis)
        {
            targetPos = np;
            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(targetPos.x, targetPos.y, 0));
            //Vector2 np = new Vector2(uvCenterOffset.x * Screen.width, uvCenterOffset.y * Screen.height);
            //point -= np;
            InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos);
        }
        //movePos = Vector3.Lerp(movePos, targetPos, Time.deltaTime * moveSpeed);
        //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(movePos.x, movePos.y, 0));
    }
    Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
    void onFilterPos2(Vector2 _vector2Pos, int index)
    {
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height);
        if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
        {
            _targetPoints2[index] = np;
            InfraredCameraHelper.InvokeOnPositionUpdate2(_targetPoints2[index], index);
        }
    }
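
    // Note on the filters above: a position is forwarded only when the new pixel position has moved at least
    // filterDis (3 px by default) away from the last forwarded position, which suppresses sub-pixel jitter.
    // Worked example with filterDis = 3 on a 1920x1080 screen: a UV change from (0.5000, 0.5000) to
    // (0.5010, 0.5000) moves the pixel position by about 1.9 px and is ignored, while a change to
    // (0.5020, 0.5000) moves it by about 3.8 px and triggers InvokeOnPositionUpdate.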
    #region Automatic recognition
    int Capture = 30;
    int Delay = 30;
    Vector2 EnterResolution;
    // int DefaultResolutionIndex;
    // readonly public int HighScreenLocateResolutionIndex = 2; // During automatic recognition the camera resolution is fixed at 1280 x 720 (index 2).
    public void BtnScreenLocate()
    {
        if (DebugScreenImages.Count != 0)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            CameraSize = new o0.Geometry2D.Vector<int>(DebugScreenImages[0].width, DebugScreenImages[0].height);
            WebCamIsReady(DebugScreenImages[0]);
            CreateUVCTexture2DIfNeeded();
        }
        //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
        //screenIdentification.LocateScreen(Capture, Delay);
        OnLocateScreenEnter();
    }
    // bool log1 = false, log2 = false;
    public void OnLocateScreenEnter()
    {
        bAutomaticRecognition = true;
        bAutomaticRecognitionStart = true;
        ResetScreenIdentification();
        //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // Record the resolution before entering (the in-game resolution, lower than the one used for recognition).
        //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
        // Debug.Log("[ScreenLocate] Start capture DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
        // InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
        EnterResolution = mUVCCameraInfo.Size; // Record the resolution before entering (the in-game resolution, lower than the one used for recognition).
        Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // The highest resolution.
        Resize((int)_HighResolution.x, (int)_HighResolution.y);
        if (DebugOnZIMDemo)
            screenIdentification.LocateScreen();
#if UNITY_EDITOR
        UVCUpdate(false);
#endif
        //CreateUVCTexture2DIfNeeded();
        // log1 = true;
        // log2 = true;
    }
    public void OnLocateScreenEnd()
    {
        bAutomaticRecognitionEnd = true;
        // Restore the resolution used before this screen recognition (recognition currently runs at a high resolution and switches back to the lower one when it finishes).
        //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
        Resize((int)EnterResolution.x, (int)EnterResolution.y);
    }
    /**
     * Change the camera's actual resolution.
     */
    public void Resize(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID
        // Send the command that changes the camera's actual resolution.
        mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
        // PC: TODO, decide how to handle this.
        // ResizePC(width, height);
#endif
        //mUVCCameraInfo.SetSize(width, height); // Record the resolution manually; this may be a problem because width and height are the requested resolution, not the camera's actual one.
        Debug.Log($"[ScreenLocate] Changing resolution mUVCCameraInfo origin:[{mUVCCameraInfo.CurrentWidth},{mUVCCameraInfo.CurrentHeight}]=>target:[{width},{height}]");
        // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }
    /// <summary>
    /// Change the resolution on PC.
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    public void ResizePC(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
        //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
        // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
        // if (pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
        //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
        mUVCCameraInfo.SetSize(width, height); // Record the resolution manually; this may be a problem because width and height are the requested resolution, not the camera's actual one.
        Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
    }
    private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        // Stop the current WebCamTexture.
        _webCamTexture.Stop();
        // Trigger OnWebCamStopped event.
        // OnWebCamStopped?.Invoke();
        // Wait for a short time to ensure resources are released.
        yield return new WaitForSeconds(0.5f);
        // Create a new WebCamTexture with the new dimensions.
        _webCamTexture = new WebCamTexture(newWidth, newHeight);
        pcWebCamera.webCamTexture = _webCamTexture;
        mUVCTexture = _webCamTexture;
        // Restart the camera.
        yield return StartCoroutine(StartWebCam(pcWebCamera));
    }
    private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        _webCamTexture.Play();
        // Wait until the WebCamTexture is playing.
        while (!_webCamTexture.isPlaying)
        {
            yield return null;
        }
        // Trigger OnWebCamStarted event.
        //OnWebCamStarted?.Invoke();
        mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // Record the resolution manually; this may be a problem because width and height are the requested resolution, not the camera's actual one.
        Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
        // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }
    #endregion
    public void BtnScreenMap()
    {
        ToMode(Mode.ScreenMap);
    }
    // Enter manual screen locating.
    public void BtnScreenLocateManual()
    {
        ToMode(Mode.ScreenLocateManual);
    }
    // Reset the screen-identification data.
    public void ResetScreenIdentification()
    {
        screenIdentification.Screen.Active = false;
    }
    // threshold is in [0, 1]: 0 means nearest, 1 means farthest.
    public void SetReDoLocateCalibrationRatio(float threshold)
    {
        const float MIN = 0.005f;
        const float MAX = 0.305f;
        ReDoLocateCalibrationRatio = MIN + (MAX - MIN) * threshold;
    }
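
    // The mapping above is linear: threshold 0.0 -> ratio 0.005, 0.5 -> 0.155, 1.0 -> 0.305.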
    /// <summary>
    /// Fixed vertex order: bottom-left, bottom-right, top-left, top-right.
    /// </summary>
    public static List<Vector2> quadUnityVectorList = new();
    /// <summary>
    /// Build a printable description of the quad.
    /// </summary>
    /// <param name="list">bottom-left, bottom-right, top-left, top-right</param>
    /// <returns></returns>
    public string PrintVector2List(List<Vector2> list)
    {
        if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
        string result = "";
        if (list.Count == 4)
        {
            result = "bottom-left" + list[0].ToString() + ", bottom-right" + list[1].ToString() + ", top-left" + list[2].ToString() + ", top-right" + list[3].ToString();
        }
        else
        {
            result = "count != 4 error";
        }
        //foreach (Vector2 vector in list)
        //{
        //    result += vector.ToString() + " ";
        //}
        //Debug.Log(result);
        return result;
    }
    /// <summary>
    /// Check whether any component is NaN.
    /// </summary>
    /// <param name="vectors"></param>
    /// <returns></returns>
    public bool ContainsNaN(List<Vector2> vectors)
    {
        foreach (var v in vectors)
        {
            if (float.IsNaN(v.x) || float.IsNaN(v.y))
            {
                return true;
            }
        }
        return false;
    }
    // Mark the four corners of the screen; ScreenQuadObject has four child nodes used as markers.
    public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
    {
        if (screen == null)
        {
            Info.text = "Failed to recognize the screen";
            return;
        }
        Info.text = "Screen recognized";
        //if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
        //{
        //    ScreenQuadObject.gameObject.SetActive(true);
        //    for (int i = 0; i < 4; i++)
        //    {
        //        if (DebugOnZIMDemo)
        //        {
        //            RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
        //            t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
        //        }
        //    }
        //}
        quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // Record the four corners.
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            if (DebugOnZIMDemo)
                SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] ShowScreen screen recognized, updated quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
        }
        else
        {
            Debug.LogError("[ScreenLocate] ShowScreen NaN values present, recalibration needed: " + PrintVector2List(quadUnityVectorList));
        }
    }
    public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);
    /// <summary>
    /// Save the calibration corner positions locally.
    /// </summary>
    static public void SaveScreenLocateVectorList()
    {
        string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
        Debug.Log("SaveScreenLocateVectorList: " + saveStr);
        PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
    }
    /// <summary>
    /// Load the locally stored calibration corner positions.
    /// </summary>
    static public bool GetScreenLocateVectorList()
    {
        string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
        Debug.Log("GetScreenLocateVectorList:" + posListStr);
        if (!string.IsNullOrWhiteSpace(posListStr))
        {
            quadUnityVectorList.Clear();
            quadUnityVectorList = posListStr.Split(';')
                .Select(s =>
                {
                    string[] parts = s.Split(',');
                    return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
                })
                .ToList();
            return true;
        }
        else return false;
    }
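
    // Persistence format used by the two methods above (PlayerPrefs key "ScreenLocateVectorList"): corners are
    // stored as "x,y" pairs joined with ';' in the fixed order bottom-left;bottom-right;top-left;top-right,
    // e.g. "0.1,0.1;0.9,0.1;0.1,0.9;0.9,0.9". Note that both the save (string interpolation) and the load
    // (float.Parse) are culture-sensitive; on locales that use ',' as the decimal separator an
    // invariant-culture format/parse would be safer.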
    public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation")
    {
        // Compute the offset from the original center to the input point.
        if (type == "CameraLocation")
        {
            CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
            return CameraLocationOffset;
        }
        else
        {
            // ScreenUV
            UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
            return UVOffset;
        }
    }
    /// <summary>
    /// Reset the offsets.
    /// </summary>
    public void ResetPointsOffest()
    {
        CameraLocationOffset = Vector2.zero;
        UVOffset = Vector2.zero;
    }
    /// <summary>
    /// Compute the camera location after applying the offset.
    /// </summary>
    /// <param name="cameraLocatoin"></param>
    /// <returns></returns>
    public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin)
    {
        return cameraLocatoin - CameraLocationOffset;
    }
    void ToMode(Mode mode)
    {
        if (this.mode == mode)
            return;
        if (mode == Mode.ScreenMap)
        {
            if (!screenIdentification.Screen.Active)
            {
                Info.text = "Locate the screen first";
                return;
            }
            Info.text = "Press ESC to exit";
            SetScreen(Color.black);
            //Info.transform.SetAsLastSibling();
            this.mode = Mode.ScreenMap;
        }
        else if (mode == Mode.InfraredLocate)
        {
            Info.text = screenIdentification.Screen.Active ? "Screen located" : "Failed to locate the screen";
            //Info.text = "Screen recognized";
            SetScreen(null);
            foreach (var i in CrosshairInScreen)
                i.gameObject.SetActive(false);
            FullScreenImage.gameObject.SetActive(false);
            ScreenPixelCheaker.HideImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 4);
            this.mode = Mode.InfraredLocate;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} Mode.InfraredLocate: screen recognized: {screenIdentification.Screen.Active}");
#endif
        }
        else if (mode == Mode.ScreenLocateManual)
        {
            Info.text = "Left-click the bottom-left corner of the screen";
            FullScreenImage.gameObject.SetActive(true);
            ScreenPixelCheaker.ShowImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 1);
            // var newTex = WebCamera.webCamTexture.AutoLight(10);
            //DebugTexture(1, TextureToTexture2D(rawImage.texture));
            CreateUVCTexture2DIfNeeded();
            DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
            //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
            //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
            this.mode = Mode.ScreenLocateManual;
        }
    }
    private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
    {
        if (width == 0)
            width = texture.width;
        if (height == 0)
            height = texture.height;
        Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture renderTexture = RenderTexture.GetTemporary(
            width,
            height,
            0,
            RenderTextureFormat.ARGB32,
            RenderTextureReadWrite.Linear);
        Graphics.Blit(texture, renderTexture);
        RenderTexture.active = renderTexture;
        _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        _texture2D.Apply();
        RenderTexture.active = currentRT;
        RenderTexture.ReleaseTemporary(renderTexture);
        return _texture2D;
    }
    //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
    //{
    //    if (mUVCTexture2D != null)
    //        Destroy(mUVCTexture2D);
    //    mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    //}
    /// <summary>
    /// Rebuild mUVCTexture2D using the default mUVCTexture width and height.
    /// </summary>
    private void CreateUVCTexture2DIfNeeded()
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture);
    }
    /// <summary>
    /// Rebuild mUVCTexture2D at the given width and height.
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    }
    #region DoubleButton
    private DateTime m_firstTime;
    private DateTime m_secondTime;
    private void Press()
    {
        Debug.Log("Entering manual screen locating");
        BtnScreenLocateManual();
        resetTime();
    }
    public void OnDoubleClick()
    {
        // Reset after a timeout.
        if (!m_firstTime.Equals(default(DateTime)))
        {
            var intervalTime = DateTime.Now - m_firstTime;
            var milliSeconds = intervalTime.TotalMilliseconds; // TotalMilliseconds also covers intervals longer than a minute.
            if (milliSeconds >= 400)
                resetTime();
        }
        // Record the time of each press.
        if (m_firstTime.Equals(default(DateTime)))
            m_firstTime = DateTime.Now;
        else
            m_secondTime = DateTime.Now;
        // Trigger on the second click when the interval is below 400 ms.
        if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
        {
            var intervalTime = m_secondTime - m_firstTime;
            var milliSeconds = intervalTime.TotalMilliseconds;
            if (milliSeconds < 400)
                Press();
            else
                resetTime();
        }
    }
    private void resetTime()
    {
        m_firstTime = default(DateTime);
        m_secondTime = default(DateTime);
    }
    #endregion
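
    // Illustrative wiring sketch (assumption: a UnityEngine.UI.Button in the scene forwards its clicks here;
    // double-clicking it within 400 ms switches to manual screen locating):
    //
    //     someButton.onClick.AddListener(ScreenLocate.Main.OnDoubleClick);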
    #region Performance measurement
    void InvalidateTimings()
    {
        m_ValidHistoryFrames = 0;
        m_AverageTime = float.NaN;
        m_MedianTime = float.NaN;
        m_MinTime = float.NaN;
        m_MaxTime = float.NaN;
    }
    void UpdateInputs()
    {
        // Reset.
        if (Input.GetKeyDown(KeyCode.UpArrow))
        {
            InvalidateTimings();
        }
    }
    #endregion
}