// ScreenLocate.cs

#define ENABLE_LOG
using InfraredManager;
using o0;
using SLAMUVC;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.AI;
using UnityEngine.UI;
using ZIM;
using ZIM.Unity;
using static SLAMUVC.UVCManager;
using Color = UnityEngine.Color;
using Time = UnityEngine.Time;

[RequireComponent(typeof(Canvas))]
public partial class ScreenLocate : MonoBehaviour
{
    public InfraredCameraHelper InfraredCameraHelper;
    private const string TAG = "ScreenLocate#";

    public enum InfraredCount : int
    {
        Single = 1,
        Double = 2
    }

    enum Mode
    {
        InfraredLocate,
        ScreenMap,
        ScreenLocateManual
    }

    enum Platform
    {
        Window,
        Android
    }

    Platform mPlatform = Platform.Android;

    public enum ScreenIdentificationTag
    {
        // How the screen is located: manual, semi-automatic, automatic
        Manual,
        SemiAuto,
        Auto
    }

    // Two spots, ordered by infrared spot size from largest to smallest; coordinates are obtained from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
    public InfraredSpot[] InfraredSpots
    {
        get
        {
            infraredCount = InfraredCount.Double;
            return infraredSpotBuffer;
        }
    }

    // One spot; coordinates are obtained from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
    public InfraredSpot InfraredSpotSingle
    {
        get
        {
            infraredCount = InfraredCount.Single;
            return infraredSpotBuffer[0];
        }
    }
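    // Illustrative sketch (not part of the original code): how a caller might poll the
    // single-spot property each frame and read both coordinate spaces. The null checks and
    // log text are assumptions; only InfraredSpotSingle, ScreenUV and CameraLocation come
    // from the members above.
    //
    //     void PollSingleSpot()
    //     {
    //         var spot = ScreenLocate.Main.InfraredSpotSingle;   // also switches infraredCount to Single
    //         if (spot.ScreenUV != null)
    //             Debug.Log("UV on screen (0..1): " + spot.ScreenUV.Value);
    //         if (spot.CameraLocation != null)
    //             Debug.Log("Pixel in camera image: " + spot.CameraLocation.Value);
    //     }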
    public InfraredSpot[] infraredSpotBuffer;

    public string GetInfraredCount() { return infraredCount.ToString(); }

    /// <summary>
    /// After locating there may be three results (manual, semi-automatic, automatic); one of them is chosen as the screen that is finally recognized.
    /// Returns false if the chosen one is null, i.e. no screen was recognized; otherwise returns true.
    /// </summary>
    public bool SelectScreenAfterLocate(ScreenIdentificationTag tag) => ScreenIdentification.SelectScreenAfterLocate(tag);

    /// <summary>
    /// State of the last semi-automatic recognition; the array is null before any recognition has run.
    /// Index it for a bool: false means that edge failed to be recognized (the manual data was applied as a fallback). 0 = bottom, 1 = right, 2 = top, 3 = left.
    /// </summary>
    public bool[] LastQuadSemiAutoState() => screenIdentification.LastQuadSemiAutoState;
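    // Illustrative sketch (assumption, not original code): reading the per-edge result of the
    // last semi-automatic pass. The edge names only mirror the index order documented above
    // (0 = bottom, 1 = right, 2 = top, 3 = left).
    //
    //     void LogSemiAutoEdges()
    //     {
    //         bool[] edges = ScreenLocate.Main.LastQuadSemiAutoState();
    //         if (edges == null) return; // semi-automatic recognition has not run yet
    //         string[] names = { "bottom", "right", "top", "left" };
    //         for (int i = 0; i < edges.Length; i++)
    //             Debug.Log(names[i] + (edges[i] ? ": recognized" : ": fell back to manual data"));
    //     }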
    /// <summary>
    /// Textures output while the algorithm runs: 0 original image, 1 all line segments found by semi-automatic recognition, 2 screen black/white difference, 3 recognition result, 4 screen difference overlaid with the recognition result, 5 candidate segments for semi-automatic recognition
    /// </summary>
    public Texture2D[] OutputTextures => outputTexture2D;

    /// <summary>
    /// Offset applied to CameraLocation
    /// </summary>
    public Vector2 CameraLocationOffset { get; set; } = new Vector2(0, 0);
    public Vector2 UVOffset { get; set; } = new Vector2(0, 0);
    // public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();

    #region UVC objects
    //public UVCManager mUVCManager;
    public CameraInfo mUVCCameraInfo;
    public bool getUVCCameraInfo => mUVCCameraInfo != null;
    public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
    private Texture mUVCTexture;
    public Texture getUVCTexture => mUVCTexture;
    public Texture setUVCTexture
    {
        set
        {
            mUVCTexture = value;
        }
    }
    private Texture2D mUVCTexture2D;
    // [SerializeField] Texture2DArray mUVCOutArray;
    #endregion

    public Text Info;
    public List<RectTransform> CrosshairInCamera;
    public List<RectTransform> CrosshairInScreen;
    public RectTransform ScreenQuad;
    public Toggle SaveToggle;
    public Toggle FullScreenToggle;
    public LineGenerator UILineGenerator;
    public bool ShowScreenQuad = false;
    // RawImages displayed on the demo UI
    public List<RawImage> outputRawImages;
    readonly Texture2D[] outputTexture2D = new Texture2D[8];
    public RawImage FullScreenImage;
    public PixelCheaker ScreenPixelCheaker;
    public InfraredSpotSettings InfraredSpotSettings;
    // Global record of the CameraSize currently used by the algorithm; both infrared recognition and screen recognition use it
    public o0.Geometry2D.Vector<int> CameraSize { get; set; }
    public List<Texture2D> DebugScreenImages = new List<Texture2D>();
    public bool DebugOnZIMDemo = false;
    // private SynchronizationContext mainContext;
    // Whether to display a single point
    public bool bSinglePoint = true; // single-point recognition by default
    [NonSerialized] public InfraredCount infraredCount = InfraredCount.Single; // number of infrared spots to recognize, one or two
    public float ReDoLocateCalibrationRatio { get; private set; } // Distance ratio used for calibration during semi-automatic locating; it is calibrated against the manual result, and segments too far from the manual one are discarded
    bool bIdentifyRed = true; // the device defaults to red
    bool bIdentifyGreen = true;

    #region Performance profiling
    public Text m_UITime;
    const float m_UIUpdateInterval = 0.1f;
    float m_UIUpdateTimer = 0.0f;
    List<float> m_History = new List<float>(100);
    int m_ValidHistoryFrames = 0;
    float m_AverageTime = float.NaN;
    float m_MedianTime = float.NaN;
    float m_MinTime = float.NaN;
    float m_MaxTime = float.NaN;
    public float updateInterval = 0.5F;
    private double lastInterval;
    private int frames = 0;
    private float fps;
    public Text m_FPS;
    #endregion

    #region PC-specific parameters
    // Brightness
    public float pcBrightness { get; set; } = 0.0f;
    // Contrast
    public float pcContrast { get; set; } = 0.0f;
    #endregion

    // Infrared spot recognition algorithm
    InfraredLocate infraredLocate;
    // Screen recognition algorithm
    o0.Project.ScreenIdentification screenIdentification;
    public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;
    RectTransform canvas;
    Mode mode;
    //List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
    //o0.Project.WebCam o0WebCam = null;

    /// <summary>
    /// Recognition-in-progress state, recorded during automatic recognition
    /// </summary>
    bool bAutomaticRecognition { get; set; } = false; // capture is running
    bool bAutomaticRecognitionStart { get; set; } = false; // whether to start capturing
    bool bAutomaticRecognitionEnd { get; set; } = false; // whether to end capturing
    [NonSerialized] public RectTransform BackQuad = null;
    static public ScreenLocate Main { get; private set; }

    static public void AutoLightPixels(Color[] pixels, int width, int height)
    {
        if (Main.DebugOnZIMDemo)
        {
            var newTex = pixels.zimAutoLightSimple(width, height);
            DebugTexture(7, newTex);
            try
            {
                Main.FullScreenImage.texture = newTex;
            }
            catch { }
        }
    }

    static public void DebugTexture(int index, Texture2D texture)
    {
        LateDestory(Main.outputTexture2D[index]);
        Main.outputTexture2D[index] = texture;
        try
        {
            Main.outputRawImages[index].texture = texture;
        }
        catch { }
    }

    static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));

    static public void SetScreen(UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }

    static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.anchorMin = rect.min;
        Main.BackQuad.anchorMax = rect.max;
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }

    static void DebugBackQuad(Rect? rect = null)
    {
        if (Main.BackQuad)
        {
            Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
            Main.BackQuad.GetComponent<RawImage>().color = Color.white;
            Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
            if (rect.HasValue)
            {
                Main.BackQuad.anchorMin = rect.Value.min;
                Main.BackQuad.anchorMax = rect.Value.max;
            }
        }
    }

    //public void ReSizeTexture(int width, int height)
    //{
    //    Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
    //    if (mUVCTexture.width < width || mUVCTexture.height < height) // if the current resolution is too small, allocate a new texture
    //    {
    //        Texture2D tex = new Texture2D(
    //            width, height,
    //            TextureFormat.ARGB32,
    //            false, /* mipmap */
    //            true /* linear */);
    //        tex.filterMode = FilterMode.Point;
    //        tex.Apply();
    //        mUVCTexture = tex;
    //        mUVCCameraInfo.previewTexture = tex;
    //        var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
    //    }
    //}

    void Awake()
    {
        if (Main != null)
            throw new Exception("[ScreenLocaer] 不允许多个实例");
        Main = this;
#if !UNITY_EDITOR_WIN
        DebugOnZIMDemo = false;
#endif
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction += UVCIsReady;
    }

    void OnDestroy()
    {
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction -= UVCIsReady;
    }

    void Start()
    {
        //mainContext = SynchronizationContext.Current;
        canvas = transform.GetComponent<RectTransform>();
        mode = Mode.InfraredLocate;
        if (DebugScreenImages.Count != 0 && DebugOnZIMDemo)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            screenIdentification.LocateScreen();
        }
        infraredCount = InfraredCount.Single;
        ReDoLocateCalibrationRatio = 0.125f;
        #region Performance profiling
        for (var i = 0; i < m_History.Capacity; ++i)
        {
            m_History.Add(0.0f);
        }
        lastInterval = Time.realtimeSinceStartup;
        frames = 0;
        #endregion
    }

    IEnumerator LateDestoryIEnum(UnityEngine.Object o)
    {
        if (o)
        {
            yield return new WaitForEndOfFrame();
            Destroy(o);
        }
    }

    // Used by the ZIMWebCamera scene
    public void WebCamIsReady(Texture texture)
    {
        mPlatform = Platform.Window;
        mUVCTexture = texture;
        mUVCCameraInfo = new CameraInfo(mUVCTexture);
        brightness = 0;
        // UVC is ready
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }

    /// <summary>
    /// Called when the UVCManager is created and initialized
    /// </summary>
    /// <param name="cameraInfo"></param>
    public void UVCIsReady(CameraInfo cameraInfo)
    {
        mPlatform = Platform.Android;
        mUVCTexture = cameraInfo.previewTexture;
        mUVCCameraInfo = cameraInfo;
        Debug.Log("UVCIsReady:" + mUVCCameraInfo);
        // UVC is ready
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }

    /// <summary>
    /// Fetch the new previewTexture
    /// </summary>
    public void UVCUpdate(bool bChange)
    {
        mUVCTexture = mUVCCameraInfo.previewTexture;
        Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:" + bChange);
        InfraredCameraHelper?.InvokeOnUVCIsUpdate();
        // Decide here whether to enter automatic recognition
        if (bAutomaticRecognitionStart)
        {
            bAutomaticRecognitionStart = false;
            Debug.Log("[ScreenLocate] UVCUpdate 开始自动识别 Capture:" + Capture + " ,Delay: " + Delay);
            screenIdentification.LocateScreen(Capture, Delay);
        }
        if (bAutomaticRecognitionEnd)
        {
            bAutomaticRecognitionEnd = false;
            Debug.Log("[ScreenLocate] UVCUpdate 结束捕获,当前摄像机分辨率为: " + mUVCCameraInfo.Size);
            bAutomaticRecognition = false;
        }
    }

    /// <summary>
    /// Update quadUnityVectorList after the mode has been selected
    /// </summary>
    public void UpdateQuadUnityVectorList()
    {
        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
        SaveScreenLocateVectorList();
    }

    int brightness = 0;

    /// <summary>
    /// Set the brightness threshold used by the infrared algorithm
    /// </summary>
    /// <param name="value"></param>
    public void SetInfraredLocateBrightnessThreshold(float value)
    {
        if (infraredLocate != null)
        {
            if (value >= 0 && value <= 1)
                infraredLocate.SetBrightnessThreshold(value); // the parameter is the infrared brightness threshold; the smaller the threshold, the dimmer the spots that can be detected; the default is 0.93
        }
    }
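    // Illustrative sketch (assumption): wiring the threshold to a UI slider. The callback name is
    // hypothetical; the guard above only accepts values in [0, 1], and 0.93 is the documented default.
    //
    //     public void OnThresholdSliderChanged(float sliderValue)
    //     {
    //         // lower values let dimmer spots be detected
    //         ScreenLocate.Main.SetInfraredLocateBrightnessThreshold(Mathf.Clamp01(sliderValue));
    //     }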
    void Update()
    {
        //++frames;
        //float timeNow = Time.realtimeSinceStartup;
        //if (timeNow > lastInterval + updateInterval)
        //{
        //    fps = (float)(frames / (timeNow - lastInterval));
        //    frames = 0;
        //    lastInterval = timeNow;
        //}
        //if (m_FPS != null)
        //    m_FPS.text = "FPS:" + fps.ToString("f2");
        if (mUVCCameraInfo == null) return;
        if (screenIdentification == null)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            // On PC we no longer switch resolution
#if UNITY_ANDROID
            //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
            screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
#endif
        }
        if (infraredLocate == null)
        {
            infraredLocate = new InfraredLocate(mUVCCameraInfo, screenIdentification, InfraredSpotSettings, ScreenPixelCheaker);
            InfraredSpot.RefreshMinVerifyLength(new o0.Geometry2D.Float.Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y));
            // InfraredDemo initialization
            //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
            //Debug.Log("Init Red filterValue:" + redfilterValue);
            //infraredLocate.SetBrightnessThreshold(redfilterValue); // the parameter is the infrared brightness threshold; the smaller the threshold, the dimmer the spots that can be detected; the default is 0.93
        }
        /* New */
        //Debug.Log((mUVCCameraInfo != null) + " = " + mUVCCameraInfo.IsPreviewing + " = " + screenIdentification.Screen.Active);
        if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing)
        {
            //if (bAutomaticRecognition)
            //{
            //    // Resolution used while recognition is running
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log1)
            //    {
            //        log1 = false;
            //        Debug.Log("[ScreenLocate] log1:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            //else
            //{
            //    // After automatic recognition finishes, use the camera resolution getUVCCameraInfoSize
            //    //CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            //    if (log2)
            //    {
            //        log2 = false;
            //        Debug.Log("[ScreenLocate] log2:[" + (int)getUVCCameraInfoSize.x + ", " + (int)getUVCCameraInfoSize.y + "]");
            //    }
            //}
            // If a Bluetooth device is connected and it is not a 9-axis device, skip the recognition algorithms
            if (BluetoothAim.ins?.status == BluetoothStatusEnum.ConnectSuccess && AimHandler.ins && AimHandler.ins.bRuning9Axis()) return;
            // Render at the getUVCCameraInfoSize resolution
            CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            if (!screenIdentification.Update(mUVCTexture2D))
            {
                // Sync the resolution; after it changes it must also be synced to InfraredDemo
                if (RefreshCameraSize())
                {
                    if (screenIdentification.Screen.QuadInCamera != null)
                    {
                        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
                        if (!ContainsNaN(quadUnityVectorList))
                        {
                            SaveScreenLocateVectorList();
                            //SyncInfraredDemo();
                            //SyncInfraredScreenPositioningView();
                            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
                            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变:[" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
                            Debug.Log("[ScreenLocate] RefreshCameraSize 屏幕size改变,刷新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
                        }
                        else
                        {
                            Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
                        }
                    }
                    if (DebugOnZIMDemo)
                        Main.ShowScreen(screenIdentification.Screen.QuadInCamera);
                }
                // Grab the pixels for the steps below
                var pixels = mUVCTexture2D.GetPixels(); // left to right, bottom to top
                AutoLightPixels(pixels, CameraSize.x, CameraSize.y);
                if (bSinglePoint)
                    infraredSpotBuffer = infraredLocate.UpdateSingle(pixels);
                else
                    infraredSpotBuffer = infraredLocate.Update(pixels);
                if (mode == Mode.ScreenLocateManual)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Light spot detected
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                else if (mode == Mode.InfraredLocate)
                {
                    if (mPlatform == Platform.Window) // draw the points on the UI; they can be hidden once in game
                    {
                        for (int i = 0; i < infraredSpotBuffer.Length; i++)
                        {
                            if (infraredSpotBuffer[i].CameraLocation != null)
                            {
                                // Light spot detected
                                var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
                                CrosshairInCamera[i].gameObject.SetActive(true);
                                CrosshairInCamera[i].anchoredPosition = posInCanvas;
                            }
                            else
                                CrosshairInCamera[i].gameObject.SetActive(false);
                        }
                    }
                    // On mobile, use: mPlatform == Platform.Android &&
                    // Common path for both mobile and PC
                    if (infraredSpotBuffer.Length > 0)
                    {
                        int redIndex = 0;
                        int greenIndex = 1;
                        // Only show the first point (it flickers if the largest point goes out of bounds)
                        if (bSinglePoint)
                        {
                            redIndex = 0; // in single-point recognition the color can be switched
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                string str = "Single:";
                                Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                            }
                        }
                        else
                        {
                            // In dual-point mode, pick the first point
                            if (bIdentifyRed && !bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else
                                {
                                    Info.text = "未检测到红色最大点!";
                                }
                            }
                            else if (!bIdentifyRed && bIdentifyGreen)
                            {
                                if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "未检测到绿色点!";
                                }
                            }
                            else
                            {
                                // If neither color is selected, or both are, recognize both points
                                // Switch automatically when a light spot is detected
                                if (infraredSpotBuffer[redIndex].ScreenUV != null)
                                {
                                    Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                                }
                                else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                                {
                                    Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                                    //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                                    onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                                }
                                else
                                {
                                    Info.text = "未检测到点!";
                                }
                            }
                        }
                    }
                }
                else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].ScreenUV != null)
                        {
                            // Light spot detected
                            var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                            CrosshairInScreen[i].gameObject.SetActive(true);
                            CrosshairInScreen[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInScreen[i].gameObject.SetActive(false);
                    }
                    if (Input.GetKeyDown(KeyCode.Escape))
                        ToMode(Mode.InfraredLocate);
                }
            }
        }
        //var t1 = Time.realtimeSinceStartup;
        //var dt = t1 - t0;
        //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
        //++m_ValidHistoryFrames;
        //m_UIUpdateTimer += Time.deltaTime;
        //if (m_UIUpdateTimer >= m_UIUpdateInterval)
        //{
        //    m_UIUpdateTimer = 0.0f;
        //    if (m_ValidHistoryFrames >= m_History.Count)
        //    {
        //        m_ValidHistoryFrames = 0;
        //        m_AverageTime = 0.0f;
        //        m_MinTime = float.PositiveInfinity;
        //        m_MaxTime = float.NegativeInfinity;
        //        {
        //            for (var i = 0; i < m_History.Count; i++)
        //            {
        //                var time = m_History[i];
        //                m_AverageTime += time;
        //                m_MinTime = Mathf.Min(m_MinTime, time);
        //                m_MaxTime = Mathf.Max(m_MaxTime, time);
        //            }
        //            m_AverageTime /= m_History.Count;
        //        }
        //        {
        //            m_History.Sort();
        //            // Odd-length history?
        //            if ((m_History.Count & 1) != 0)
        //            {
        //                m_MedianTime = m_History[m_History.Count / 2];
        //            }
        //            else
        //            {
        //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
        //            }
        //        }
        //    }
        //    var statistics = $"{m_History.Count} 帧样本:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
        //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
        //    if (m_UITime != null)
        //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D? ",T2D:" : "")}{(mUVCTexture2D? mUVCTexture2D.width+ "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height:"")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
        //}
        //UpdateInputs();
    }

    private bool RefreshCameraSize()
    {
        var sizeNew = new o0.Geometry2D.Vector<int>((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
        if (sizeNew != CameraSize)
        {
            Debug.Log("[ScreenLocate] 分辨率变化,刷新分辨率");
            // Sync the camera resolution
            CameraSize = sizeNew;
            var sizeNewFloat = getUVCCameraInfoSize.o0Vector();
            screenIdentification.Screen.RefreshCameraSize(sizeNewFloat);
            screenIdentification.QuadAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            screenIdentification.QuadManual?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            screenIdentification.QuadSemiAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            InfraredSpot.RefreshMinVerifyLength(sizeNewFloat);
            return true;
        }
        return false;
    }

    Vector2 targetPos = Vector2.zero;
    Vector2 movePos = Vector2.zero;
    int moveSpeed = 20;
    public float filterDis = 3.0f;

    void onFilterPos(Vector2 _vector2Pos)
    {
        // Mainly used when simulating the 9-axis mode
        // Apply an offset so the final crosshair output points at the exact center
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
        if (Vector2.Distance(np, targetPos) >= filterDis)
        {
            targetPos = np;
            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(targetPos.x, targetPos.y, 0));
            //Vector2 np = new Vector2(uvCenterOffset.x * Screen.width, uvCenterOffset.y * Screen.height);
            //point -= np;
            InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos);
        }
        //movePos = Vector3.Lerp(movePos, targetPos, Time.deltaTime * moveSpeed);
        //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(movePos.x, movePos.y, 0));
    }

    Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };

    void onFilterPos2(Vector2 _vector2Pos, int index)
    {
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height);
        if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
        {
            _targetPoints2[index] = np;
            InfraredCameraHelper.InvokeOnPositionUpdate2(_targetPoints2[index], index);
        }
    }
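    // What the two filters above do, with made-up numbers: a UV sample is mapped to screen pixels
    // and only forwarded once it has moved at least filterDis (3 px by default) away from the last
    // forwarded position, which suppresses sub-pixel jitter between frames.
    //
    //     // filterDis = 3, screen 1920x1080, last forwarded position (960, 540):
    //     //   new sample maps to (961, 541) -> distance ~1.4 px -> dropped
    //     //   new sample maps to (965, 540) -> distance  5.0 px -> forwarded via InvokeOnPositionUpdate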
    #region Automatic recognition
    int Capture = 30;
    int Delay = 30;
    Vector2 EnterResolution;
    // int DefaultResolutionIndex;
    // readonly public int HighScreenLocateResolutionIndex = 2; // during automatic recognition the camera resolution is fixed at 1280 * 720 (which corresponds to index 2)

    public void BtnScreenLocate()
    {
        if (DebugScreenImages.Count != 0)
        {
            screenIdentification = new o0.Project.ScreenIdentification();
            CameraSize = new o0.Geometry2D.Vector<int>(DebugScreenImages[0].width, DebugScreenImages[0].height);
            WebCamIsReady(DebugScreenImages[0]);
            CreateUVCTexture2DIfNeeded();
        }
        //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
        //screenIdentification.LocateScreen(Capture, Delay);
        OnLocateScreenEnter();
    }

    // bool log1 = false, log2 = false;
    public void OnLocateScreenEnter()
    {
        bAutomaticRecognition = true;
        bAutomaticRecognitionStart = true;
        ResetScreenIdentification();
        //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // record the resolution before entering (the game-scene resolution, lower than the one used for recognition)
        //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
        // Debug.Log("[ScreenLocate] 开始捕获 DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
        // InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
        EnterResolution = mUVCCameraInfo.Size; // record the resolution before entering (the game-scene resolution, lower than the one used for recognition)
        Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // the highest resolution
        Resize((int)_HighResolution.x, (int)_HighResolution.y);
        screenIdentification.LocateScreen();
        //CreateUVCTexture2DIfNeeded();
        // log1 = true;
        // log2 = true;
    }

    public void OnLocateScreenEnd()
    {
        bAutomaticRecognitionEnd = true;
        // Record the resolution used by this screen recognition (recognition currently runs at the high resolution and switches back to the low one when it finishes)
        //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
        Resize((int)EnterResolution.x, (int)EnterResolution.y);
    }

    /**
     * Change the camera's actual resolution
     */
    public void Resize(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID
        // Send the command that changes the camera's actual resolution
        mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
        // PC TODO: figure out how to handle this
        // ResizePC(width, height);
#endif
        //mUVCCameraInfo.SetSize(width, height); // record the resolution manually; this may be problematic because width and height are the requested resolution rather than the camera's actual current resolution
        Debug.Log($"[ScreenLocate] 开始修改分辨率 mUVCCameraInfo origin:[{mUVCCameraInfo.CurrentWidth},{mUVCCameraInfo.CurrentHeight}]=>target:[{width},{height}]");
        // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }

    /// <summary>
    /// Change the resolution on PC
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    public void ResizePC(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
        //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
        // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
        // if(pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
        //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
        mUVCCameraInfo.SetSize(width, height); // record the resolution manually; this may be problematic because width and height are the requested resolution rather than the camera's actual current resolution
        Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
    }

    private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        // Stop the current WebCamTexture
        _webCamTexture.Stop();
        // Trigger OnWebCamStopped event
        // OnWebCamStopped?.Invoke();
        // Wait for a short time to ensure resources are released
        yield return new WaitForSeconds(0.5f);
        // Create a new WebCamTexture with the new dimensions
        _webCamTexture = new WebCamTexture(newWidth, newHeight);
        pcWebCamera.webCamTexture = _webCamTexture;
        mUVCTexture = _webCamTexture;
        // Restart the camera
        yield return StartCoroutine(StartWebCam(pcWebCamera));
    }

    private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        _webCamTexture.Play();
        // Wait until the WebCamTexture is playing
        while (!_webCamTexture.isPlaying)
        {
            yield return null;
        }
        // Trigger OnWebCamStarted event
        //OnWebCamStarted?.Invoke();
        mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // record the resolution manually; this may be problematic because width and height are the requested resolution rather than the camera's actual current resolution
        Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
        // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
    }
    #endregion

    public void BtnScreenMap()
    {
        ToMode(Mode.ScreenMap);
    }

    // Enter manual screen locating
    public void BtnScreenLocateManual()
    {
        ToMode(Mode.ScreenLocateManual);
    }

    // Reset the screen recognition data
    public void ResetScreenIdentification()
    {
        screenIdentification.Screen.Active = false;
    }

    // threshold is in the range 0-1, where 0 means nearest and 1 means farthest
    public void SetReDoLocateCalibrationRatio(float threshold)
    {
        const float MIN = 0.005f;
        const float MAX = 0.305f;
        ReDoLocateCalibrationRatio = MIN + (MAX - MIN) * threshold;
    }
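    // Worked example of the mapping above: threshold 0-1 is mapped linearly onto [MIN, MAX] = [0.005, 0.305].
    //
    //     threshold = 0.0  ->  ReDoLocateCalibrationRatio = 0.005
    //     threshold = 0.5  ->  ReDoLocateCalibrationRatio = 0.005 + 0.300 * 0.5 = 0.155
    //     threshold = 1.0  ->  ReDoLocateCalibrationRatio = 0.305
    //
    // Semi-automatic segments farther from the manual result than this ratio allows are discarded
    // (see the ReDoLocateCalibrationRatio field above).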
    /// <summary>
    /// Fixed vertex order: bottom-left, bottom-right, top-left, top-right
    /// </summary>
    public static List<Vector2> quadUnityVectorList = new();

    /// <summary>
    /// Format the quad for logging
    /// </summary>
    /// <param name="list">bottom-left, bottom-right, top-left, top-right</param>
    /// <returns></returns>
    public string PrintVector2List(List<Vector2> list)
    {
        if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
        string result = "";
        if (list.Count == 4)
        {
            result = "左下" + list[0].ToString() + ",右下" + list[1].ToString() + ",左上" + list[2].ToString() + ",右上" + list[3].ToString();
        }
        else
        {
            result = "count != 4 error";
        }
        //foreach (Vector2 vector in list)
        //{
        //    result += vector.ToString() + " ";
        //}
        //Debug.Log(result);
        return result;
    }

    /// <summary>
    /// Check whether any vector contains NaN
    /// </summary>
    /// <param name="vectors"></param>
    /// <returns></returns>
    public bool ContainsNaN(List<Vector2> vectors)
    {
        foreach (var v in vectors)
        {
            if (float.IsNaN(v.x) || float.IsNaN(v.y))
            {
                return true;
            }
        }
        return false;
    }

    // Mark the four corners of the screen; ScreenQuadObject has four child nodes used as markers
    public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
    {
        if (screen == null)
        {
            Info.text = "识别屏幕失败";
            return;
        }
        Info.text = "已识别到屏幕";
        //if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
        //{
        //    ScreenQuadObject.gameObject.SetActive(true);
        //    for (int i = 0; i < 4; i++)
        //    {
        //        if (DebugOnZIMDemo)
        //        {
        //            RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
        //            t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
        //        }
        //    }
        //}
        quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // record the four points
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            if (DebugOnZIMDemo)
                SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] ShowScreen 已识别到屏幕,更新quadUnityVectorList:" + PrintVector2List(quadUnityVectorList));
        }
        else
        {
            Debug.LogError("[ScreenLocate] RefreshCameraSize 屏幕size改变,存在NaN值,重新校准:" + PrintVector2List(quadUnityVectorList));
        }
    }

    public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);

    /// <summary>
    /// Persist the calibration point positions locally
    /// </summary>
    static public void SaveScreenLocateVectorList()
    {
        string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
        Debug.Log("SaveScreenLocateVectorList: " + saveStr);
        PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
    }

    /// <summary>
    /// Load the locally stored calibration point positions
    /// </summary>
    static public bool GetScreenLocateVectorList()
    {
        string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
        Debug.Log("GetScreenLocateVectorList:" + posListStr);
        if (!string.IsNullOrWhiteSpace(posListStr))
        {
            quadUnityVectorList.Clear();
            quadUnityVectorList = posListStr.Split(';')
                .Select(s =>
                {
                    string[] parts = s.Split(',');
                    return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
                })
                .ToList();
            return true;
        }
        else return false;
    }
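    // Illustrative sketch of the PlayerPrefs format used by the two methods above: four "x,y" pairs
    // joined with ';' under the key "ScreenLocateVectorList". The sample values are made up.
    //
    //     // Save: quadUnityVectorList = { (0.1, 0.2), (0.9, 0.2), (0.1, 0.8), (0.9, 0.8) }
    //     //   -> "0.1,0.2;0.9,0.2;0.1,0.8;0.9,0.8"
    //     // Load:
    //     if (ScreenLocate.GetScreenLocateVectorList())
    //         Debug.Log("restored " + ScreenLocate.quadUnityVectorList.Count + " corner points");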
    public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation")
    {
        // Compute the offset from the original center to the input point
        if (type == "CameraLocation")
        {
            CameraLocationOffset = inputPoint - screenIdentification.Screen.TransformToCamera(new Vector2(0.5f, 0.5f) * screenIdentification.Screen.UVSize);
            return CameraLocationOffset;
        }
        else
        {
            //ScreenUV
            UVOffset = inputPoint - new Vector2(0.5f, 0.5f);
            return UVOffset;
        }
    }

    /// <summary>
    /// Reset the offsets
    /// </summary>
    public void ResetPointsOffest()
    {
        CameraLocationOffset = Vector2.zero;
        UVOffset = Vector2.zero;
    }

    /// <summary>
    /// Compute the cameraLocatoin position after applying the offset
    /// </summary>
    /// <param name="cameraLocatoin"></param>
    /// <returns></returns>
    public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin)
    {
        return cameraLocatoin - CameraLocationOffset;
    }

    void ToMode(Mode mode)
    {
        if (this.mode == mode)
            return;
        if (mode == Mode.ScreenMap)
        {
            if (!screenIdentification.Screen.Active)
            {
                Info.text = "先定位屏幕";
                return;
            }
            Info.text = "按ESC退出";
            SetScreen(Color.black);
            //Info.transform.SetAsLastSibling();
            this.mode = Mode.ScreenMap;
        }
        else if (mode == Mode.InfraredLocate)
        {
            Info.text = screenIdentification.Screen.Active ? "已定位屏幕" : "定位屏幕失败";
            //Info.text = "已识别到屏幕";
            SetScreen(null);
            foreach (var i in CrosshairInScreen)
                i.gameObject.SetActive(false);
            FullScreenImage.gameObject.SetActive(false);
            ScreenPixelCheaker.HideImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 4);
            this.mode = Mode.InfraredLocate;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} Mode.InfraredLocate:已识别到屏幕:{screenIdentification.Screen.Active}");
#endif
        }
        else if (mode == Mode.ScreenLocateManual)
        {
            Info.text = "左键单击屏幕 左下角";
            FullScreenImage.gameObject.SetActive(true);
            ScreenPixelCheaker.ShowImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 1);
            // var newTex = WebCamera.webCamTexture.AutoLight(10);
            //DebugTexture(1, TextureToTexture2D(rawImage.texture));
            CreateUVCTexture2DIfNeeded();
            DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
            //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
            //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
            this.mode = Mode.ScreenLocateManual;
        }
    }

    private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
    {
        if (width == 0)
            width = texture.width;
        if (height == 0)
            height = texture.height;
        Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture renderTexture = RenderTexture.GetTemporary(
            width,
            height,
            0,
            RenderTextureFormat.ARGB32,
            RenderTextureReadWrite.Linear);
        Graphics.Blit(texture, renderTexture);
        RenderTexture.active = renderTexture;
        _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        _texture2D.Apply();
        RenderTexture.active = currentRT;
        RenderTexture.ReleaseTemporary(renderTexture);
        return _texture2D;
    }

    //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
    //{
    //    if (mUVCTexture2D != null)
    //        Destroy(mUVCTexture2D);
    //    mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    //}

    /// <summary>
    /// Use the default mUVCTexture width and height
    /// </summary>
    private void CreateUVCTexture2DIfNeeded()
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture);
    }

    /// <summary>
    /// Rebuild mUVCTexture2D with the given width and height
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    }

    #region DoubleButton
    private DateTime m_firstTime;
    private DateTime m_secondTime;

    private void Press()
    {
        Debug.Log("进入手动定位");
        BtnScreenLocateManual();
        resetTime();
    }

    public void OnDoubleClick()
    {
        // Reset on timeout
        if (!m_firstTime.Equals(default(DateTime)))
        {
            var intervalTime = DateTime.Now - m_firstTime;
            // Use TotalMilliseconds so intervals longer than a minute do not wrap back into the 0-400 ms range
            float milliSeconds = (float)intervalTime.TotalMilliseconds;
            if (milliSeconds >= 400)
                resetTime();
        }
        // Record the times of the two presses
        if (m_firstTime.Equals(default(DateTime)))
            m_firstTime = DateTime.Now;
        else
            m_secondTime = DateTime.Now;
        // Trigger on the second click, when the interval is below 400 ms
        if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
        {
            var intervalTime = m_secondTime - m_firstTime;
            float milliSeconds = (float)intervalTime.TotalMilliseconds;
            if (milliSeconds < 400)
                Press();
            else
                resetTime();
        }
    }
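    // Illustrative timing of the double-click detection above (400 ms window, example values):
    //
    //     click at t = 0 ms,  click at t = 250 ms  -> interval 250 ms < 400 -> Press() enters manual locating
    //     click at t = 0 ms,  click at t = 650 ms  -> interval >= 400 ms    -> timers reset, no trigger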
    private void resetTime()
    {
        m_firstTime = default(DateTime);
        m_secondTime = default(DateTime);
    }
    #endregion

    #region Performance profiling
    void InvalidateTimings()
    {
        m_ValidHistoryFrames = 0;
        m_AverageTime = float.NaN;
        m_MedianTime = float.NaN;
        m_MinTime = float.NaN;
        m_MaxTime = float.NaN;
    }

    void UpdateInputs()
    {
        // Reset
        if (Input.GetKeyDown(KeyCode.UpArrow))
        {
            InvalidateTimings();
        }
    }
    #endregion
}