ScreenLocate.cs

  1. #define ENABLE_LOG
  2. using InfraredManager;
  3. using o0;
  4. using o0.Geometry2D.Float;
  5. using o0InfraredLocate.ZIM;
  6. using SixLabors.ImageSharp.PixelFormats;
  7. using SLAMUVC;
  8. using System;
  9. using System.Collections;
  10. using System.Collections.Generic;
  11. using System.Linq;
  12. using UnityEngine;
  13. using UnityEngine.Experimental.AI;
  14. using UnityEngine.UI;
  15. using ZIM;
  16. using ZIM.Unity;
  17. using static SLAMUVC.UVCManager;
  18. using Color = UnityEngine.Color;
  19. using Time = UnityEngine.Time;
  20. [RequireComponent(typeof(Canvas))]
  21. public partial class ScreenLocate : o0InfraredCameraHandler
  22. {
  23. public InfraredCameraHelper InfraredCameraHelper;
  24. private const string TAG = "ScreenLocate#";
  25. enum Mode
  26. {
  27. InfraredLocate,
  28. ScreenMap,
  29. ScreenLocateManual
  30. }
  31. enum Platform
  32. {
  33. Window,
  34. Android
  35. }
  36. Platform mPlatform = Platform.Android;
public enum ScreenIdentificationTag
{
// How the screen was located: manual, semi-automatic, or automatic
Manual,
SemiAuto,
Auto
}
// Two spots, ordered by infrared spot size from largest to smallest; coordinates are read from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
  45. public InfraredSpot[] InfraredSpots
  46. {
  47. get
  48. {
  49. return infraredSpotBuffer;
  50. }
  51. }
// One spot; coordinates are read from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
  53. public InfraredSpot InfraredSpotSingle
  54. {
  55. get
  56. {
  57. return infraredSpotBuffer[0];
  58. }
  59. }
public InfraredSpot[] infraredSpotBuffer => IsSinglePoint() ? infraredLocate.InfraredSpotBuffer : infraredLocate.InfraredSpotBuffer.Reverse().ToArray();
// The currently applied / effective screen quadrilateral data (QuadrilateralInCamera)
public QuadrilateralInCamera CurrentScreenQuad => screenIdentification.Screen.QuadInCamera;
/// <summary>
/// After locating there can be up to three results (manual, semi-automatic, automatic); pick one of them as the final recognized screen.
/// Returns false if the selected result is null (no screen recognized), otherwise true.
/// </summary>
public bool SelectScreenAfterLocate(ScreenIdentificationTag tag) => ScreenIdentification.SelectScreenAfterLocate(tag);
/// State of the last screen-locating attempt for the given tag; null if nothing has been recognized yet (or recognition failed).
  69. public QuadrilateralInCamera LastQuadState(ScreenIdentificationTag tag)
  70. {
  71. QuadrilateralInCamera target = tag switch
  72. {
  73. ScreenLocate.ScreenIdentificationTag.Manual => ScreenIdentification.QuadManual,
  74. ScreenLocate.ScreenIdentificationTag.SemiAuto => ScreenIdentification.QuadSemiAuto,
  75. ScreenLocate.ScreenIdentificationTag.Auto => ScreenIdentification.QuadAuto,
  76. _ => null
  77. };
  78. return target;
  79. }
/// <summary>
/// Result of the last semi-automatic recognition; the array is null if nothing has been recognized yet.
/// Index each edge to get a boolean: false means that edge failed recognition (and the manual data was applied as a fallback). Index order: 0-bottom, 1-right, 2-top, 3-left.
/// </summary>
public bool[] LastQuadSemiAutoState() => screenIdentification.LastQuadSemiAutoState;
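// Example (hypothetical values): a result of { true, true, false, true } would mean the top edge
// (index 2) failed semi-automatic recognition and the manual data was applied for that edge.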
/// <summary>
/// Textures produced while the algorithm runs: 0 original image, 1 all line segments found during semi-automatic recognition, 2 screen black/white difference, 3 recognition result, 4 screen difference overlaid with the result, 5 candidate segments during semi-automatic recognition.
/// </summary>
public Texture2D[] OutputTextures => outputTexture2D;
public Vector2 CameraLocationOffset
{
get => infraredLocate.CameraLocationOffset;
set { infraredLocate.SetCameraLocationOffset(value); }
}
public Vector2 UVOffset
{
get => infraredLocate.UVOffset;
set { infraredLocate.SetUVOffset(value); }
}
// Records the most recently applied value
Vector2 OldCameraLocationOffset { get; set; } = new Vector2(0, 0);
// Records the most recently applied value
Vector2 OldUVOffset { get; set; } = new Vector2(0, 0);
#region Dual-point case
public Vector2[] curCameraLocationOffsets
{
get => CameraLocationOffsets;
set
{
// Make sure the array has the expected size (exactly two elements)
if (value.Length == 2)
{
CameraLocationOffsets = value;
}
}
}
public Vector2[] curUVOffsets
{
get => UVOffsets;
set
{
// Make sure the array has the expected size (exactly two elements)
if (value.Length == 2)
{
UVOffsets = value;
}
}
}
// Records the most recently applied values, one entry per point; defaults to zero
Vector2[] CameraLocationOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
// Records the most recently applied values, one entry per point; defaults to zero
Vector2[] UVOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
// Records the most recently applied values, one entry per point; defaults to zero
Vector2[] OldCameraLocationOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
// Records the most recently applied values, one entry per point; defaults to zero
Vector2[] OldUVOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
#endregion
// Whether single-point display is enabled
public bool bSinglePoint => infraredLocate.bSinglePoint;
  143. // public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();
#region UVC-handled objects
//public UVCManager mUVCManager;
public CameraInfo mUVCCameraInfo;
public bool getUVCCameraInfo => mUVCCameraInfo != null;
  148. public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
  149. private Texture mUVCTexture;
  150. public Texture getUVCTexture => mUVCTexture;
  151. public Texture setUVCTexture
  152. {
  153. set
  154. {
  155. mUVCTexture = value;
  156. }
  157. }
  158. private Texture2D mUVCTexture2D;
  159. // [SerializeField] Texture2DArray mUVCOutArray;
  160. #endregion
  161. public Text Info;
  162. public List<RectTransform> CrosshairInCamera;
  163. public List<RectTransform> CrosshairInScreen;
  164. public RectTransform ScreenQuad;
  165. public Toggle SaveToggle;
  166. public Toggle FullScreenToggle;
  167. public Toggle SingleToggle;
  168. public LineGenerator UILineGenerator;
  169. public bool ShowScreenQuad = false;
// RawImages shown on the demo UI
  171. public List<RawImage> outputRawImages;
  172. readonly Texture2D[] outputTexture2D = new Texture2D[8];
  173. public RawImage FullScreenImage;
  174. public PixelCheaker ScreenPixelCheaker;
  175. public List<Texture2D> DebugScreenImages = new List<Texture2D>();
  176. public bool DebugOnZIMDemo = false;
  177. // private SynchronizationContext mainContext;
public float ReDoLocateCalibrationRatio { get; private set; } // Distance ratio used to calibrate semi-automatic locating against the manual result; segments too far from the manual quad are discarded
public void SetCameraSize(Vector size) => cameraSize = size;
public override Vector CameraSize => cameraSize;
// CameraSize used by the algorithms; consumed by both infrared recognition and screen recognition
Vector cameraSize;
bool bIdentifyRed = true; // The device defaults to red
bool bIdentifyGreen = true;
#region Performance monitoring
  186. public Text m_UITime;
  187. const float m_UIUpdateInterval = 0.1f;
  188. float m_UIUpdateTimer = 0.0f;
  189. List<float> m_History = new List<float>(100);
  190. int m_ValidHistoryFrames = 0;
  191. float m_AverageTime = float.NaN;
  192. float m_MedianTime = float.NaN;
  193. float m_MinTime = float.NaN;
  194. float m_MaxTime = float.NaN;
  195. public float updateInterval = 0.5F;
  196. private double lastInterval;
  197. private int frames = 0;
  198. private float fps;
  199. public Text m_FPS;
  200. #endregion
#region PC-specific parameters
// Brightness
public float pcBrightness { get; set; } = 0.0f;
// Contrast
public float pcContrast { get; set; } = 0.0f;
#endregion
// Infrared spot recognition algorithm
InfraredLocate infraredLocate;
// Screen recognition algorithm
o0.Project.ScreenIdentification screenIdentification;
  211. public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;
  212. RectTransform canvas;
  213. Mode mode;
  214. //List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
  215. //o0.Project.WebCam o0WebCam = null;
/// <summary>
/// Recognition-in-progress state, tracked during automatic recognition
/// </summary>
bool bAutomaticRecognition { get; set; } = false; // Capture is in progress
bool bAutomaticRecognitionStart { get; set; } = false; // Whether capture should start
bool bAutomaticRecognitionEnd { get; set; } = false; // Whether capture should end
  222. [NonSerialized] public RectTransform BackQuad = null;
  223. static public ScreenLocate Main { get; private set; }
  224. static public void AutoLightPixels(Color[] pixels, int width, int height)
  225. {
  226. if (Main.DebugOnZIMDemo)
  227. {
  228. var newTex = pixels.zimAutoLightSimple(width, height);
  229. DebugTexture(7, newTex);
  230. try
  231. {
  232. Main.FullScreenImage.texture = newTex;
  233. }
  234. catch { }
  235. }
  236. }
  237. static public void DebugTexture(int index, Texture2D texture)
  238. {
  239. LateDestory(Main.outputTexture2D[index]);
  240. Main.outputTexture2D[index] = texture;
  241. try
  242. {
  243. Main.outputRawImages[index].texture = texture;
  244. }
  245. catch { }
  246. }
  247. static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));
  248. static public void SetScreen(UnityEngine.Color? color = null)
  249. {
  250. if (Main.BackQuad == null)
  251. {
  252. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  253. var background = canvas.Find("Background");
  254. Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  255. }
  256. Main.BackQuad.parent.gameObject.SetActive(color != null);
  257. Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  258. //Debug.Log("Set Screen " + color.GetColorName());
  259. }
  260. static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
  261. {
  262. if (Main.BackQuad == null)
  263. {
  264. var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
  265. var background = canvas.Find("Background");
  266. Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
  267. }
  268. Main.BackQuad.parent.gameObject.SetActive(color != null);
  269. Main.BackQuad.anchorMin = rect.min;
  270. Main.BackQuad.anchorMax = rect.max;
  271. Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
  272. //Debug.Log("Set Screen " + color.GetColorName());
  273. }
  274. static void DebugBackQuad(Rect? rect = null)
  275. {
  276. if (Main.BackQuad)
  277. {
  278. Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
  279. Main.BackQuad.GetComponent<RawImage>().color = Color.white;
  280. Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
  281. if (rect.HasValue)
  282. {
  283. Main.BackQuad.anchorMin = rect.Value.min;
  284. Main.BackQuad.anchorMax = rect.Value.max;
  285. }
  286. }
  287. }
  288. //public void ReSizeTexture(int width, int height)
  289. //{
  290. // Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
// if (mUVCTexture.width < width || mUVCTexture.height < height) // If the current resolution is too small, allocate a new texture
  292. // {
  293. // Texture2D tex = new Texture2D(
  294. // width, height,
  295. // TextureFormat.ARGB32,
  296. // false, /* mipmap */
  297. // true /* linear */);
  298. // tex.filterMode = FilterMode.Point;
  299. // tex.Apply();
  300. // mUVCTexture = tex;
  301. // mUVCCameraInfo.previewTexture = tex;
  302. // var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
  303. // }
  304. //}
  305. void Awake()
  306. {
  307. if (Main != null)
throw new Exception("[ScreenLocate] Multiple instances are not allowed");
  309. Main = this;
  310. #if !UNITY_EDITOR_WIN
  311. DebugOnZIMDemo = false;
  312. #endif
  313. //if (mUVCDrawer)
  314. // mUVCDrawer.StartPreviewAction += UVCIsReady;
  315. }
  316. void OnDestroy()
  317. {
  318. //if (mUVCDrawer)
  319. // mUVCDrawer.StartPreviewAction -= UVCIsReady;
  320. }
  321. void Start()
  322. {
  323. //mainContext = SynchronizationContext.Current;
  324. canvas = transform.GetComponent<RectTransform>();
  325. mode = Mode.InfraredLocate;
  326. if (DebugScreenImages.Count != 0 && DebugOnZIMDemo)
  327. {
  328. screenIdentification = new o0.Project.ScreenIdentification(this);
  329. screenIdentification.LocateScreen();
  330. }
  331. ReDoLocateCalibrationRatio = 0.125f;
#region Performance monitoring
  333. for (var i = 0; i < m_History.Capacity; ++i)
  334. {
  335. m_History.Add(0.0f);
  336. }
  337. lastInterval = Time.realtimeSinceStartup;
  338. frames = 0;
  339. #endregion
  340. }
// Initialize the algorithms
  342. bool bInitScreenIdentificationAndInfraredLocate = false;
void AlgorithmInit()
{
if (screenIdentification == null)
{
screenIdentification = new o0.Project.ScreenIdentification(this);
Debug.Log("[ScreenLocate] Initializing screen recognition");
//screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
// Initialize the screen data
InfraredCameraHelper?.InitScreenLocateManual();
}
if (infraredLocate == null)
{
infraredLocate = new InfraredLocate(this, screenIdentification.Screen);
cameraSize = new Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y);
InfraredSpot.RefreshMinVerifyLength(new o0.Geometry2D.Float.Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y));
Debug.Log($"[ScreenLocate] Initializing infrared spot recognition, current camera resolution: {CameraSize.x}×{CameraSize.y}, infrared tracking distance: {InfraredSpot.MinVerifyLength}");
//InfraredDemo initialization
//float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
//Debug.Log("Init Red filterValue:" + redfilterValue);
//infraredLocate.SetBrightnessThreshold(redfilterValue); // Parameter is the infrared spot brightness threshold; the lower the threshold, the dimmer the spots that can be detected. Default is 0.93.
// UI-related setup
if (SingleToggle != null)
{
infraredLocate.SetSinglePoint(SingleToggle.isOn);
SingleToggle.onValueChanged.AddListener((i) =>
{
infraredLocate.SetSinglePoint(i);
});
}
}
if (screenIdentification != null && infraredLocate != null && !bInitScreenIdentificationAndInfraredLocate)
{
InfraredCameraHelper?.InvokeOnScreenLocateIsReady();
bInitScreenIdentificationAndInfraredLocate = true;
}
}
  380. IEnumerator LateDestoryIEnum(UnityEngine.Object o)
  381. {
  382. if (o)
  383. {
  384. yield return new WaitForEndOfFrame();
  385. Destroy(o);
  386. }
  387. }
// Used by the ZIMWebCamera scene
  389. public void WebCamIsReady(Texture texture)
  390. {
  391. mPlatform = Platform.Window;
  392. mUVCTexture = texture;
  393. mUVCCameraInfo = new CameraInfo(mUVCTexture);
  394. brightness = 0;
// The UVC camera is ready
  396. InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
  397. }
/// <summary>
/// Called when the UVCManager is created and initialized.
/// </summary>
/// <param name="cameraInfo"></param>
  402. public void UVCIsReady(CameraInfo cameraInfo)
  403. {
  404. mPlatform = Platform.Android;
  405. mUVCTexture = cameraInfo.previewTexture;
  406. mUVCCameraInfo = cameraInfo;
  407. Debug.Log("UVCIsReady:" + mUVCCameraInfo);
// The UVC camera is ready
  409. InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
  410. }
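// Call flow as wired up in this class (an observation, not enforced elsewhere): WebCamIsReady or
// UVCIsReady fires once the camera becomes available, then UVCUpdate runs whenever previewTexture
// is recreated (for example after a Resize issued by the automatic recognition flow).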
/// <summary>
/// Fetch the new previewTexture.
/// </summary>
  414. public void UVCUpdate(bool bChange)
  415. {
  416. mUVCTexture = mUVCCameraInfo.previewTexture;
Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:" + bChange);
InfraredCameraHelper?.InvokeOnUVCIsUpdate();
// Decide here whether to enter automatic recognition
if (bAutomaticRecognitionStart)
{
bAutomaticRecognitionStart = false;
Debug.Log("[ScreenLocate] UVCUpdate starting automatic recognition, Capture: " + Capture + ", Delay: " + Delay);
screenIdentification.LocateScreen(Capture, Delay);
}
if (bAutomaticRecognitionEnd)
{
bAutomaticRecognitionEnd = false;
Debug.Log("[ScreenLocate] UVCUpdate capture finished, current camera resolution: " + mUVCCameraInfo.Size);
bAutomaticRecognition = false;
}
  432. }
/// <summary>
/// Refresh quadUnityVectorList after a mode has been selected.
/// </summary>
  436. public void UpdateQuadUnityVectorList()
  437. {
  438. quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
  439. SaveScreenLocateVectorList();
  440. }
int brightness = 0;
/// <summary>
/// Set the brightness threshold used by the infrared spot algorithm.
/// </summary>
/// <param name="value"></param>
public void SetInfraredLocateBrightnessThreshold(float value)
{
if (infraredLocate != null)
infraredLocate.SetBrightnessThreshold(value); // The infrared spot brightness threshold; the lower the threshold, the dimmer the spots that can be detected. Default is 0.93.
}
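// Usage sketch (the 0.90f value is purely illustrative, not a tuned recommendation):
//   ScreenLocate.Main.SetInfraredLocateBrightnessThreshold(0.90f); // accept slightly dimmer spots than the 0.93 default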
  451. void Update()
  452. {
  453. //++frames;
  454. //float timeNow = Time.realtimeSinceStartup;
  455. //if (timeNow > lastInterval + updateInterval)
  456. //{
  457. // fps = (float)(frames / (timeNow - lastInterval));
  458. // frames = 0;
  459. // lastInterval = timeNow;
  460. //}
  461. //if (m_FPS != null)
  462. // m_FPS.text = "FPS:" + fps.ToString("f2");
  463. if (mUVCCameraInfo == null) return;
  464. AlgorithmInit();
  465. if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing)
  466. {
// Render at the getUVCCameraInfoSize resolution
CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
if (!screenIdentification.Update(mUVCTexture2D))
{
// Synchronize the resolution; after it changes it also needs to be propagated to InfraredDemo
if (RefreshCameraSize())
  473. {
  474. if (screenIdentification.Screen.QuadInCamera != null)
  475. {
  476. quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
  477. if (!ContainsNaN(quadUnityVectorList))
  478. {
  479. SaveScreenLocateVectorList();
  480. //SyncInfraredDemo();
  481. //SyncInfraredScreenPositioningView();
  482. InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed: [" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed, refreshed quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
}
else
{
Debug.LogError("[ScreenLocate] RefreshCameraSize screen size changed, NaN values present, recalibration required: " + PrintVector2List(quadUnityVectorList));
  489. }
  490. }
  491. if (DebugOnZIMDemo)
  492. Main.ShowScreen(screenIdentification.Screen.QuadInCamera);
  493. }
  494. if (CameraSize.x != mUVCTexture2D.width || CameraSize.y != mUVCTexture2D.height)
  495. {
Debug.Log($"<color=red>[ScreenLocate] Resolution mismatch, camera resolution: {getUVCCameraInfoSize}, mUVCTexture2D size: {mUVCTexture2D.width}×{mUVCTexture2D.height}</color>");
  497. return;
  498. }
// Grab the pixels for the subsequent steps
var pixels = mUVCTexture2D.GetPixels(); // Left to right, bottom to top
  501. AutoLightPixels(pixels, CameraWidth, CameraHeight);
  502. InfraredUpdate(pixels);
  503. if (mode == Mode.ScreenLocateManual)
  504. {
  505. for (int i = 0; i < infraredSpotBuffer.Length; i++)
  506. {
  507. if (infraredSpotBuffer[i].CameraLocation != null)
  508. {
// A light spot was detected
  510. var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
  511. CrosshairInCamera[i].gameObject.SetActive(true);
  512. CrosshairInCamera[i].anchoredPosition = posInCanvas;
  513. }
  514. else
  515. CrosshairInCamera[i].gameObject.SetActive(false);
  516. }
  517. }
  518. else if (mode == Mode.InfraredLocate)
  519. {
if (mPlatform == Platform.Window) // Render the crosshair points on the UI; they can be hidden once the game starts
  521. {
  522. for (int i = 0; i < infraredSpotBuffer.Length; i++)
  523. {
  524. if (infraredSpotBuffer[i].CameraLocation != null)
  525. {
// A light spot was detected
  527. var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
  528. CrosshairInCamera[i].gameObject.SetActive(true);
  529. CrosshairInCamera[i].anchoredPosition = posInCanvas;
  530. }
  531. else
  532. CrosshairInCamera[i].gameObject.SetActive(false);
  533. }
  534. }
// For mobile: mPlatform == Platform.Android &&
// Common path for both mobile and PC
  537. if (infraredSpotBuffer.Length > 0)
  538. {
  539. //int redIndex = 0;
  540. //int greenIndex = 1;
  541. //仅仅第一个点显示(如果最大点出界了会闪烁)
  542. //if (bSinglePoint)
  543. //{
  544. // redIndex = 0; //单点识别是,可以选择切换颜色
  545. // if (infraredSpotBuffer[redIndex].ScreenUV != null)
  546. // {
  547. // string str = "Single:";
  548. // Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
  549. // //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
  550. // onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
  551. // }
  552. //}
  553. //else
  554. //{
  555. // //雙點模式下選擇第一個點
  556. // if (bIdentifyRed && !bIdentifyGreen)
  557. // {
  558. // if (infraredSpotBuffer[redIndex].ScreenUV != null)
  559. // {
  560. // Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
  561. // //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
  562. // onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
  563. // }
  564. // else
  565. // {
  566. // Info.text = "未检测到红色最大点!";
  567. // }
  568. // }
  569. // else if (!bIdentifyRed && bIdentifyGreen)
  570. // {
  571. // if (infraredSpotBuffer[greenIndex].ScreenUV != null)
  572. // {
  573. // Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
  574. // //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
  575. // onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
  576. // }
  577. // else
  578. // {
  579. // Info.text = "未检测到绿色点!";
  580. // }
  581. // }
  582. // else
  583. // {
  584. // //两个不选择和两个全选都跑识别两个点
  585. // //自動切換 检测到光点
  586. // if (infraredSpotBuffer[redIndex].ScreenUV != null)
  587. // {
  588. // Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
  589. // //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
  590. // onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
  591. // }
  592. // else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
  593. // {
  594. // Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
  595. // //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
  596. // onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
  597. // }
  598. // else
  599. // {
  600. // Info.text = "未检测到点!";
  601. // }
  602. // }
  603. //}
// Only the first point is shown (if the largest spot goes out of bounds it will flicker)
if (bSinglePoint)
{
int redIndex = 0; // In single-point recognition the colour can be switched
if (infraredSpotBuffer[redIndex].ScreenUV != null)
{
onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
onCameraLocationToUVOffset(infraredSpotBuffer[redIndex].CameraLocation.Value);
// Single point uses the SDK-managed variables
InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos, targetCameraLocation);
}
}
else
{
// Notify the changes of both points at the same time
if (infraredSpotBuffer[0].ScreenUV != null) onFilterPos2(infraredSpotBuffer[0].ScreenUV.Value, 0);
if (infraredSpotBuffer[1].ScreenUV != null) onFilterPos2(infraredSpotBuffer[1].ScreenUV.Value, 1);
// Forward the camera locations
if (infraredSpotBuffer[0].CameraLocation.HasValue) onCameraLocationToUVOffset(infraredSpotBuffer[0].CameraLocation.Value, 0);
if (infraredSpotBuffer[1].CameraLocation.HasValue) onCameraLocationToUVOffset(infraredSpotBuffer[1].CameraLocation.Value, 1);
// Send the changed coordinates of both points, along with the unchanged data for any point that did not move
InfraredCameraHelper?.InvokeOnPositionUpdate2(_targetPoints2, _targetCameraLocationPoints2);
}
}
  628. }
  629. else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
  630. {
  631. for (int i = 0; i < infraredSpotBuffer.Length; i++)
  632. {
  633. if (infraredSpotBuffer[i].ScreenUV != null)
  634. {
// A light spot was detected
  636. var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
  637. CrosshairInScreen[i].gameObject.SetActive(true);
  638. CrosshairInScreen[i].anchoredPosition = posInCanvas;
  639. }
  640. else
  641. CrosshairInScreen[i].gameObject.SetActive(false);
  642. }
  643. if (Input.GetKeyDown(KeyCode.Escape))
  644. ToMode(Mode.InfraredLocate);
  645. }
  646. }
  647. }
  648. //var t1 = Time.realtimeSinceStartup;
  649. //var dt = t1 - t0;
  650. //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
  651. //++m_ValidHistoryFrames;
  652. //m_UIUpdateTimer += Time.deltaTime;
  653. //if (m_UIUpdateTimer >= m_UIUpdateInterval)
  654. //{
  655. // m_UIUpdateTimer = 0.0f;
  656. // if (m_ValidHistoryFrames >= m_History.Count)
  657. // {
  658. // m_ValidHistoryFrames = 0;
  659. // m_AverageTime = 0.0f;
  660. // m_MinTime = float.PositiveInfinity;
  661. // m_MaxTime = float.NegativeInfinity;
  662. // {
  663. // for (var i = 0; i < m_History.Count; i++)
  664. // {
  665. // var time = m_History[i];
  666. // m_AverageTime += time;
  667. // m_MinTime = Mathf.Min(m_MinTime, time);
  668. // m_MaxTime = Mathf.Max(m_MaxTime, time);
  669. // }
  670. // m_AverageTime /= m_History.Count;
  671. // }
  672. // {
  673. // m_History.Sort();
  674. // // Odd-length history?
  675. // if ((m_History.Count & 1) != 0)
  676. // {
  677. // m_MedianTime = m_History[m_History.Count / 2];
  678. // }
  679. // else
  680. // {
  681. // m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
  682. // }
  683. // }
  684. // }
  685. // var statistics = $"{m_History.Count} 帧样本:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
  686. // //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
  687. // if (m_UITime != null)
  688. // m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D? ",T2D:" : "")}{(mUVCTexture2D? mUVCTexture2D.width+ "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height:"")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
  689. //}
  690. //UpdateInputs();
  691. if (DebugOnZIMDemo)
  692. {
  693. if (Input.GetKeyDown(KeyCode.Z))
  694. SelectScreenAfterLocate(ScreenIdentificationTag.Manual);
  695. if (Input.GetKeyDown(KeyCode.X))
  696. SelectScreenAfterLocate(ScreenIdentificationTag.SemiAuto);
  697. if (Input.GetKeyDown(KeyCode.C))
  698. SelectScreenAfterLocate(ScreenIdentificationTag.Auto);
  699. }
  700. }
  701. public void InfraredUpdate(Color[] cameraPixels)
  702. {
  703. infraredLocate.InfraredUpdate(cameraPixels);
  704. if (DebugOnZIMDemo)
  705. DebugPixelSpotArea(infraredLocate.DebugAreas);
  706. }
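// RefreshCameraSize (below) compares the current camera size against the cached values and, when the
// resolution changed, resyncs the screen/quad data and returns true so Update can re-publish quadUnityVectorList.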
  707. private bool RefreshCameraSize()
  708. {
  709. var sizeNew = new Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y);
  710. var sizeNewFloat = getUVCCameraInfoSize.o0Vector();
  711. if (sizeNew != CameraSize || (screenIdentification?.Screen?.QuadInCamera != null && sizeNewFloat != screenIdentification.Screen.QuadInCamera.CameraSize))
  712. {
  713. Debug.Log($"<color=aqua>[ScreenLocate] 分辨率变化,刷新分辨率(from {CameraSize.x}×{CameraSize.y} to {sizeNew.x}×{sizeNew.y}), 是否有屏幕数据: {screenIdentification.Screen.QuadInCamera != null}, 是否有手动数据: {screenIdentification.QuadManual != null}</color>");
  714. // 同步相机分辨率
  715. cameraSize = sizeNew;
  716. screenIdentification.Screen.RefreshCameraSize(sizeNewFloat);
  717. screenIdentification.QuadAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
  718. screenIdentification.QuadManual?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
  719. screenIdentification.QuadSemiAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
  720. InfraredSpot.RefreshMinVerifyLength(sizeNewFloat);
  721. return true;
  722. }
  723. return false;
  724. }
#region Single-point and dual-point cases are currently handled differently; dual-point values are computed and stored outside the SDK
// Single point uses the SDK variables
  727. Vector2 targetPos = Vector2.zero;
  728. Vector2 targetCameraLocation = Vector2.zero;
  729. public float filterDis = 3.0f;
void onFilterPos(Vector2 _vector2Pos)
{
// Apply an offset so that the final crosshair output points at the exact centre
Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
if (Vector2.Distance(np, targetPos) >= filterDis)
{
targetPos = np;
}
}
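// Worked example (illustrative numbers): with filterDis = 3, a 1920x1080 screen and zero UVOffset,
// a ScreenUV of (0.5, 0.5) maps to (960, 540); the value is only committed if it has moved at least
// 3 pixels away from the previously reported targetPos.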
  739. void onCameraLocationToUVOffset(Vector2 _vector2Pos)
  740. {
  741. Vector2 np = new Vector2(_vector2Pos.x - CameraLocationOffset.x, _vector2Pos.y - CameraLocationOffset.y);
  742. targetCameraLocation = np;
  743. }
// Dual point is handled by local code
  745. Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
  746. void onFilterPos2(Vector2 _vector2Pos, int index)
  747. {
  748. Vector2 np = new Vector2((_vector2Pos.x - UVOffsets[index].x) * Screen.width, (_vector2Pos.y - UVOffsets[index].y) * Screen.height);
  749. if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
  750. {
  751. _targetPoints2[index] = np;
  752. }
  753. }
  754. Vector2[] _targetCameraLocationPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
  755. void onCameraLocationToUVOffset(Vector2 _vector2Pos, int index)
  756. {
  757. Vector2 np = new Vector2(_vector2Pos.x - CameraLocationOffsets[index].x, _vector2Pos.y - CameraLocationOffsets[index].y);
  758. _targetCameraLocationPoints2[index] = np;
  759. }
  760. #endregion
  761. public void DebugPixelSpotArea(List<PixelSpotArea> areas)
  762. {
  763. if (areas!=null)
  764. {
  765. Info.transform.GetChild(0).GetComponent<Text>().text = $"areas.Count: {areas.Count}";
PixelSpotArea a0 = null; // The area with the largest radius
PixelSpotArea a1 = null; // The area with the second-largest radius
foreach (var a in areas)
{
if (a0 == null || a.Radius > a0.Radius)
{
a1 = a0; // The previous largest becomes the second largest
a0 = a; // The current area becomes the largest
}
else if (a1 == null || a.Radius > a1.Radius)
{
a1 = a; // Update the second largest
}
  779. }
  780. Texture2D texture = new Texture2D(CameraWidth, CameraHeight);
  781. Color[] blackPixels = new Color[texture.width * texture.height];
  782. for (int i = 0; i < blackPixels.Length; i++)
  783. blackPixels[i] = Color.black;
  784. texture.SetPixels(blackPixels);
  785. if (a0 != null)
  786. {
  787. foreach (var p in a0.Pixels0)
  788. texture.SetPixel((int)p.x, (int)p.y, Color.yellow);
  789. foreach (var p in a0.Pixels1)
  790. texture.SetPixel((int)p.x, (int)p.y, Color.white);
  791. }
  792. if (a1 != null)
  793. {
  794. foreach (var p in a1.Pixels0)
  795. texture.SetPixel((int)p.x, (int)p.y, Color.green);
  796. foreach (var p in a1.Pixels1)
  797. texture.SetPixel((int)p.x, (int)p.y, Color.blue);
  798. }
  799. texture.Apply();
  800. DebugTexture(6, texture);
  801. }
  802. }
#region Automatic recognition
int Capture = 30;
int Delay = 30;
Vector2 EnterResolution;
// int DefaultResolutionIndex;
// readonly public int HighScreenLocateResolutionIndex = 2; // During automatic recognition the camera resolution is fixed at 1280 * 720 (index 2)
  809. public void BtnScreenLocate()
  810. {
  811. if (DebugScreenImages.Count != 0)
  812. {
  813. //screenIdentification = new o0.Project.ScreenIdentification();
  814. cameraSize = new Vector(DebugScreenImages[0].width, DebugScreenImages[0].height);
  815. WebCamIsReady(DebugScreenImages[0]);
  816. CreateUVCTexture2DIfNeeded();
  817. }
  818. //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
  819. //screenIdentification.LocateScreen(Capture, Delay);
  820. OnLocateScreenEnter();
  821. }
  822. // bool log1 = false, log2 = false;
  823. public void OnLocateScreenEnter()
  824. {
  825. bAutomaticRecognition = true;
  826. bAutomaticRecognitionStart = true;
  827. ResetScreenIdentification();
//DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // Record the resolution before entering (the game-scene resolution, lower than the one used for recognition)
//HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
// Debug.Log("[ScreenLocate] Start capture DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
// InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
EnterResolution = mUVCCameraInfo.Size; // Record the resolution before entering (the game-scene resolution, lower than the one used for recognition)
Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // The highest resolution
Resize((int)_HighResolution.x, (int)_HighResolution.y);
//CreateUVCTexture2DIfNeeded();
// log1 = true;
// log2 = true;
screenIdentification.LocateScreen(); // Entry point where automatic recognition starts
  839. if (DebugOnZIMDemo)
  840. {
  841. var webCam = GetComponent<ZIMWebCamera>();
  842. webCam.AdjustResolution(1920, 1080);
  843. mUVCCameraInfo.SetSize(webCam.webCamTexture.width, webCam.webCamTexture.height);
  844. }
  845. }
/// <summary>
/// Screen recognition has finished.
/// </summary>
  849. public void OnLocateScreenEnd()
  850. {
  851. bAutomaticRecognitionEnd = true;
// Restore the resolution recorded for this recognition pass (a high resolution is used for recognition and switched back to the lower one afterwards)
  853. //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
  854. Resize((int)EnterResolution.x, (int)EnterResolution.y);
  855. if (DebugOnZIMDemo)
  856. {
  857. var webCam = GetComponent<ZIMWebCamera>();
  858. GetComponent<ZIMWebCamera>().AdjustResolution((int)EnterResolution.x, (int)EnterResolution.y);
  859. mUVCCameraInfo.SetSize(webCam.webCamTexture.width, webCam.webCamTexture.height);
  860. }
// If the earlier ContainsNaN(quadUnityVectorList) check did not catch it, check once more here
// After recognition finishes, the semi-automatic data also has to be checked, i.e. P5?
  863. if (screenIdentification.QuadAuto == null && screenIdentification.QuadSemiAuto == null)
  864. {
  865. InfraredCameraHelper?.InvokeOnUVCPosUpdate(new List<Vector2>());
Debug.LogError("[ScreenLocate] OnLocateScreenEnd no screen was recognized");
  867. }
  868. }
/**
 * Change the camera's actual resolution.
 */
public void Resize(int width, int height)
{
if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID || UNITY_IOS
// Send the command that changes the camera's actual resolution
mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
// PC: TODO decide how to handle this
// ResizePC(width, height);
#endif
//mUVCCameraInfo.SetSize(width, height); // Record the resolution manually; this may be a problem because width and height are the requested resolution, not the camera's current actual resolution
Debug.Log($"[ScreenLocate] Changing resolution, mUVCCameraInfo origin:[{mUVCCameraInfo.CurrentWidth},{mUVCCameraInfo.CurrentHeight}]=>target:[{width},{height}]");
// if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
}
/// <summary>
/// Change the resolution on PC.
/// </summary>
  890. /// <param name="width"></param>
  891. /// <param name="height"></param>
  892. public void ResizePC(int width, int height)
  893. {
  894. if (mUVCCameraInfo == null) return;
  895. //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
  896. // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
  897. // if(pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
  898. //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
mUVCCameraInfo.SetSize(width, height); // Record the resolution manually; this may be a problem because width and height are the requested resolution, not the camera's current actual resolution
  900. Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
  901. }
  902. private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
  903. {
  904. WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
  905. // Stop the current WebCamTexture
  906. _webCamTexture.Stop();
  907. // Trigger OnWebCamStopped event
  908. // OnWebCamStopped?.Invoke();
  909. // Wait for a short time to ensure resources are released
  910. yield return new WaitForSeconds(0.5f);
  911. // Create a new WebCamTexture with the new dimensions
  912. _webCamTexture = new WebCamTexture(newWidth, newHeight);
  913. pcWebCamera.webCamTexture = _webCamTexture;
  914. mUVCTexture = _webCamTexture;
  915. // Restart the camera
  916. yield return StartCoroutine(StartWebCam(pcWebCamera));
  917. }
  918. private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
  919. {
  920. WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
  921. _webCamTexture.Play();
  922. // Wait until the WebCamTexture is playing
  923. while (!_webCamTexture.isPlaying)
  924. {
  925. yield return null;
  926. }
  927. // Trigger OnWebCamStarted event
  928. //OnWebCamStarted?.Invoke();
mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // Record the resolution manually; this could be a problem if the requested width and height differ from the camera's actual resolution
  930. Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
  931. // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
  932. }
  933. #endregion
  934. public void BtnScreenMap()
  935. {
  936. ToMode(Mode.ScreenMap);
  937. }
// Enter manual screen locating
  939. public void BtnScreenLocateManual()
  940. {
  941. ToMode(Mode.ScreenLocateManual);
  942. }
// Reset the screen recognition data
  944. public void ResetScreenIdentification()
  945. {
  946. screenIdentification.Screen.Active = false;
  947. }
// threshold is in the range 0-1: 0 means nearest, 1 means farthest
  949. public void SetReDoLocateCalibrationRatio(float threshold)
  950. {
  951. const float MIN = 0.02f;
  952. const float MAX = 0.32f;
  953. ReDoLocateCalibrationRatio = MIN + (MAX - MIN) * threshold;
  954. }
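// Example: threshold = 0.5 gives ReDoLocateCalibrationRatio = 0.02 + (0.32 - 0.02) * 0.5 = 0.17.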
/// <summary>
/// Fixed vertex order: bottom-left, bottom-right, top-left, top-right
/// </summary>
  958. public static List<Vector2> quadUnityVectorList = new();
/// <summary>
/// Format the quad for logging.
/// </summary>
/// <param name="list">bottom-left, bottom-right, top-left, top-right</param>
/// <returns></returns>
  964. public string PrintVector2List(List<Vector2> list)
  965. {
  966. if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
  967. string result = "";
  968. if (list.Count == 4)
  969. {
result = "bottom-left " + list[0].ToString() + ", bottom-right " + list[1].ToString() + ", top-left " + list[2].ToString() + ", top-right " + list[3].ToString();
  971. }
  972. else
  973. {
  974. result = "count != 4 error";
  975. }
  976. //foreach (Vector2 vector in list)
  977. //{
  978. // result += vector.ToString() + " ";
  979. //}
  980. //Debug.Log(result);
  981. return result;
  982. }
/// <summary>
/// Check whether any component is NaN.
/// </summary>
  986. /// <param name="vectors"></param>
  987. /// <returns></returns>
  988. public bool ContainsNaN(List<Vector2> vectors)
  989. {
  990. foreach (var v in vectors)
  991. {
  992. if (float.IsNaN(v.x) || float.IsNaN(v.y))
  993. {
  994. return true;
  995. }
  996. }
  997. return false;
  998. }
// Mark the four corners of the screen; ScreenQuadObject has four child nodes used as markers
  1000. public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
  1001. {
if (screen == null)
{
Info.text = "Screen recognition failed";
return;
}
Info.text = "Screen recognized";
  1008. //if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
  1009. //{
  1010. // ScreenQuadObject.gameObject.SetActive(true);
  1011. // for (int i = 0; i < 4; i++)
  1012. // {
  1013. // if (DebugOnZIMDemo)
  1014. // {
  1015. // RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
  1016. // t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
  1017. // }
  1018. // }
  1019. //}
quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // Record the four vertices
  1021. if (!ContainsNaN(quadUnityVectorList))
  1022. {
  1023. SaveScreenLocateVectorList();
  1024. //SyncInfraredDemo();
  1025. if (DebugOnZIMDemo)
  1026. SyncInfraredScreenPositioningView();
  1027. InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
Debug.Log("[ScreenLocate] ShowScreen screen recognized, updated quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
  1029. }
  1030. else
  1031. {
Debug.LogError("[ScreenLocate] ShowScreen NaN values present, recalibration required: " + PrintVector2List(quadUnityVectorList));
  1033. }
  1034. }
  1035. public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);
/// <summary>
/// Persist the calibration point positions locally.
/// </summary>
  1039. static public void SaveScreenLocateVectorList()
  1040. {
  1041. //string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
// If the list is empty, save an empty string (or a custom marker)
  1043. string saveStr = quadUnityVectorList.Count > 0 ? string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")) : "";
  1044. Debug.Log("SaveScreenLocateVectorList: " + saveStr);
  1045. PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
  1046. }
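// Stored format example (illustrative values): "0.1,0.1;0.9,0.1;0.1,0.9;0.9,0.9" --
// four "x,y" pairs joined by ';' in the fixed order bottom-left, bottom-right, top-left, top-right.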
/// <summary>
/// Load the locally stored calibration point positions.
/// </summary>
  1050. static public bool GetScreenLocateVectorList()
  1051. {
  1052. string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
  1053. Debug.Log("GetScreenLocateVectorList:" + posListStr);
  1054. if (!string.IsNullOrWhiteSpace(posListStr))
  1055. {
  1056. quadUnityVectorList.Clear();
  1057. quadUnityVectorList = posListStr.Split(';')
  1058. .Select(s =>
  1059. {
  1060. string[] parts = s.Split(',');
  1061. return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
  1062. })
  1063. .ToList();
  1064. return true;
  1065. }
  1066. else return false;
  1067. }
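// Note: both SaveScreenLocateVectorList and GetScreenLocateVectorList format/parse floats with the
// current culture, so the stored string round-trips on the same device but is not guaranteed to be
// portable across different culture settings.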
  1068. public void SetSinglePoint(bool value)=> infraredLocate.SetSinglePoint(value);
  1069. public bool IsSinglePoint() => infraredLocate.IsSinglePoint();
  1070. public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation") => infraredLocate.GetCenterOffset(inputPoint, type);
/// <summary>
/// Reset the offsets.
/// </summary>
public void ResetPointsOffest() => infraredLocate.ResetCenterOffset();
/// <summary>
/// Initialize the recorded values.
/// </summary>
  1078. /// <param name="inputPointOffset"></param>
  1079. /// <param name="type"></param>
  1080. /// <returns></returns>
  1081. public Vector2 SetPointsOffset(Vector2 inputPointOffset, string type = "CameraLocation")
  1082. {
// Compute the offset from the original centre to the input point
  1084. if (type == "CameraLocation")
  1085. {
  1086. OldCameraLocationOffset = CameraLocationOffset = inputPointOffset;
  1087. return CameraLocationOffset;
  1088. }
  1089. else
  1090. {
  1091. //ScreenUV
  1092. OldUVOffset = UVOffset = inputPointOffset;
  1093. return UVOffset;
  1094. }
  1095. }
/// <summary>
/// Undo the last offset change.
/// </summary>
  1099. public void RevokePointsOffest() {
  1100. CameraLocationOffset = OldCameraLocationOffset;
  1101. UVOffset = OldUVOffset;
  1102. }
/// <summary>
/// Compute the camera location after applying the offset.
/// </summary>
/// <param name="cameraLocation"></param>
/// <returns></returns>
public Vector2 GetOffsetCameraLocation(Vector2 cameraLocation)
{
return cameraLocation - CameraLocationOffset;
}
#region Dual-point operations
/// <summary>
/// Set the recorded offset for one of the two points.
/// </summary>
  1116. /// <param name="playerType"></param>
  1117. /// <param name="inputPointOffset"></param>
  1118. /// <param name="type"></param>
  1119. /// <returns></returns>
  1120. public Vector2[] SetPointsOffsets(PlayerType playerType, Vector2 inputPointOffset, string type = "CameraLocation")
  1121. {
  1122. int index = 0;
  1123. if (playerType == PlayerType.FirstPlayer)
  1124. {
  1125. index = 0;
  1126. }
  1127. else
  1128. {
  1129. index = 1;
  1130. }
// Compute the offset from the original centre to the input point
  1132. if (type == "CameraLocation")
  1133. {
  1134. OldCameraLocationOffsets[index] = CameraLocationOffsets[index] = inputPointOffset;
  1135. return CameraLocationOffsets;
  1136. }
  1137. else
  1138. {
  1139. //ScreenUV
  1140. OldUVOffsets[index] = UVOffsets[index] = inputPointOffset;
  1141. return UVOffsets;
  1142. }
  1143. }
/// <summary>
/// Undo the last offset change (dual point).
/// </summary>
  1147. public void RevokePointsOffests()
  1148. {
  1149. CameraLocationOffsets = OldCameraLocationOffsets;
  1150. UVOffsets = OldUVOffsets;
  1151. }
public void ResetPointsOffests()
{
SetPointsOffsets(PlayerType.FirstPlayer, Vector2.zero, "CameraLocation");
SetPointsOffsets(PlayerType.SecondPlayer, Vector2.zero, "CameraLocation");
SetPointsOffsets(PlayerType.FirstPlayer, Vector2.zero, "ScreenUV");
SetPointsOffsets(PlayerType.SecondPlayer, Vector2.zero, "ScreenUV");
}
/// <summary>
/// Called when calibrating the centre point of the left or right half-screen.
/// </summary>
  1160. /// <param name="inputPoint"></param>
  1161. /// <param name="type"></param>
  1162. /// <param name="isLeftScreen"></param>
  1163. /// <returns></returns>
  1164. public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation", bool isLeftScreen = true)
  1165. {
  1166. Vector2 offset;
  1167. if (type == "CameraLocation")
  1168. {
// Compute the CameraLocation offset, taking the left or right half-screen into account
  1170. Vector2 center = screenIdentification.Screen.TransformToCamera(
  1171. new Vector2(isLeftScreen ? 0.25f : 0.75f, 0.5f) * screenIdentification.Screen.UVSize
  1172. );
offset = inputPoint - center; // Offset = input point - centre point
  1174. }
  1175. else
  1176. {
// Offset in UV space, taking the left or right half-screen into account
Vector2 center = new Vector2(isLeftScreen ? 0.25f : 0.75f, 0.5f); // Centre of the left or right half-screen
offset = inputPoint - center; // Offset = input point - centre point
  1180. }
  1181. return offset;
  1182. }
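// Example (UV branch, illustrative values): with isLeftScreen = true the reference centre is
// (0.25, 0.5), so an inputPoint of (0.30, 0.55) yields an offset of (0.05, 0.05).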
  1183. #endregion
  1184. void ToMode(Mode mode)
  1185. {
  1186. if (this.mode == mode)
  1187. return;
  1188. if (mode == Mode.ScreenMap)
  1189. {
  1190. if (!screenIdentification.Screen.Active)
  1191. {
Info.text = "Locate the screen first";
return;
}
Info.text = "Press ESC to exit";
  1196. SetScreen(Color.black);
  1197. //Info.transform.SetAsLastSibling();
  1198. this.mode = Mode.ScreenMap;
  1199. }
  1200. else if (mode == Mode.InfraredLocate)
  1201. {
Info.text = screenIdentification.Screen.Active ? "Screen located" : "Screen locating failed";
//Info.text = "Screen recognized";
  1204. SetScreen(null);
  1205. foreach (var i in CrosshairInScreen)
  1206. i.gameObject.SetActive(false);
  1207. FullScreenImage.gameObject.SetActive(false);
  1208. ScreenPixelCheaker?.HideImage();
  1209. //Info.transform.SetSiblingIndex(transform.childCount - 4);
  1210. this.mode = Mode.InfraredLocate;
  1211. #if (!NDEBUG && DEBUG && ENABLE_LOG)
Console.WriteLine($"{TAG} Mode.InfraredLocate: screen recognized: {screenIdentification.Screen.Active}");
  1213. #endif
  1214. }
  1215. else if (mode == Mode.ScreenLocateManual)
  1216. {
Info.text = "Left-click the bottom-left corner of the screen";
  1218. FullScreenImage.gameObject.SetActive(true);
  1219. ScreenPixelCheaker?.ShowImage();
  1220. //Info.transform.SetSiblingIndex(transform.childCount - 1);
  1221. // var newTex = WebCamera.webCamTexture.AutoLight(10);
  1222. //DebugTexture(1, TextureToTexture2D(rawImage.texture));
  1223. CreateUVCTexture2DIfNeeded();
  1224. DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
  1225. //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
  1226. //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
  1227. this.mode = Mode.ScreenLocateManual;
  1228. }
  1229. }
  1230. private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
  1231. {
  1232. if (width == 0)
  1233. width = texture.width;
  1234. if (height == 0)
  1235. height = texture.height;
  1236. Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
  1237. RenderTexture currentRT = RenderTexture.active;
  1238. RenderTexture renderTexture = RenderTexture.GetTemporary(
  1239. width,
  1240. height,
  1241. 0,
  1242. RenderTextureFormat.ARGB32,
  1243. RenderTextureReadWrite.Linear);
  1244. Graphics.Blit(texture, renderTexture);
  1245. RenderTexture.active = renderTexture;
  1246. _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
  1247. _texture2D.Apply();
  1248. RenderTexture.active = currentRT;
  1249. RenderTexture.ReleaseTemporary(renderTexture);
  1250. return _texture2D;
  1251. }
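// Note: ReadPixels is a synchronous GPU readback, so rebuilding mUVCTexture2D every frame
// (see Update -> CreateUVCTexture2DIfNeeded) is likely the dominant per-frame cost in this class.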
  1252. //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
  1253. //{
  1254. // if (mUVCTexture2D != null)
  1255. // Destroy(mUVCTexture2D);
  1256. // mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
  1257. //}
/// <summary>
/// Use the default mUVCTexture width and height.
/// </summary>
  1261. private void CreateUVCTexture2DIfNeeded()
  1262. {
  1263. if (mUVCTexture2D != null)
  1264. Destroy(mUVCTexture2D);
  1265. mUVCTexture2D = TextureToTexture2D(mUVCTexture);
  1266. }
/// <summary>
/// Rebuild mUVCTexture2D at the given width and height.
/// </summary>
  1270. /// <param name="width"></param>
  1271. /// <param name="height"></param>
  1272. private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
  1273. {
  1274. if (mUVCTexture2D != null)
  1275. Destroy(mUVCTexture2D);
  1276. mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
  1277. }
  1278. #region DoubleButton
  1279. private DateTime m_firstTime;
  1280. private DateTime m_secondTime;
  1281. private void Press()
  1282. {
Debug.Log("Entering manual screen locating");
  1284. BtnScreenLocateManual();
  1285. resetTime();
  1286. }
  1287. public void OnDoubleClick()
  1288. {
// Reset on timeout
if (!m_firstTime.Equals(default(DateTime)))
{
var intervalTime = DateTime.Now - m_firstTime;
float milliSeconds = (float)intervalTime.TotalMilliseconds;
if (milliSeconds >= 400)
resetTime();
}
// Record the time of each of the two presses
if (m_firstTime.Equals(default(DateTime)))
m_firstTime = DateTime.Now;
else
m_secondTime = DateTime.Now;
// Triggered on the second click, when the two clicks are less than 400 ms apart
if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
{
var intervalTime = m_secondTime - m_firstTime;
float milliSeconds = (float)intervalTime.TotalMilliseconds;
  1307. if (milliSeconds < 400)
  1308. Press();
  1309. else
  1310. resetTime();
  1311. }
  1312. }
  1313. private void resetTime()
  1314. {
  1315. m_firstTime = default(DateTime);
  1316. m_secondTime = default(DateTime);
  1317. }
  1318. #endregion
#region Performance monitoring
  1320. void InvalidateTimings()
  1321. {
  1322. m_ValidHistoryFrames = 0;
  1323. m_AverageTime = float.NaN;
  1324. m_MedianTime = float.NaN;
  1325. m_MinTime = float.NaN;
  1326. m_MaxTime = float.NaN;
  1327. }
  1328. void UpdateInputs()
  1329. {
// Reset
  1331. if (Input.GetKeyDown(KeyCode.UpArrow))
  1332. {
  1333. InvalidateTimings();
  1334. }
  1335. }
  1336. #endregion
  1337. }