// ScreenLocate.cs
#define ENABLE_LOG
using InfraredManager;
using o0;
using o0.Geometry2D.Float;
using o0InfraredLocate.ZIM;
using SixLabors.ImageSharp.PixelFormats;
using SLAMUVC;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.AI;
using UnityEngine.UI;
using ZIM;
using ZIM.Unity;
using static SLAMUVC.UVCManager;
using Color = UnityEngine.Color;
using Time = UnityEngine.Time;
[RequireComponent(typeof(Canvas))]
public partial class ScreenLocate : o0InfraredCameraHandler
{
    public InfraredCameraHelper InfraredCameraHelper;
    private const string TAG = "ScreenLocate#";
    enum Mode
    {
        InfraredLocate,
        ScreenMap,
        ScreenLocateManual
    }
    enum Platform
    {
        Window,
        Android
    }
    Platform mPlatform = Platform.Android;
    public enum ScreenIdentificationTag
    {
        // How the screen is located: manual, semi-automatic, or automatic
        Manual,
        SemiAuto,
        Auto
    }
    // Two spots, ordered by infrared spot size from largest to smallest; coordinates are read from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
    public InfraredSpot[] InfraredSpots
    {
        get
        {
            return infraredSpotBuffer;
        }
    }
    // Single spot; coordinates are read from InfraredSpot.ScreenUV and InfraredSpot.CameraLocation
    public InfraredSpot InfraredSpotSingle
    {
        get
        {
            return infraredSpotBuffer[0];
        }
    }
    public InfraredSpot[] infraredSpotBuffer => IsSinglePoint() ? infraredLocate.InfraredSpotBuffer : infraredLocate.InfraredSpotBuffer.Reverse().ToArray();
    // The screen quadrilateral currently applied/in effect (QuadrilateralInCamera)
    public QuadrilateralInCamera CurrentScreenQuad => screenIdentification.Screen.QuadInCamera;
    /// <summary>
    /// After locating there may be three results (manual, semi-auto, auto); one of them is selected as the final recognized screen.
    /// Returns false if the selected result is null (no screen was recognized), otherwise true.
    /// </summary>
    public bool SelectScreenAfterLocate(ScreenIdentificationTag tag) => ScreenIdentification.SelectScreenAfterLocate(tag);
    /// State of the last screen locate for the given tag; null while nothing has been recognized (or recognition failed)
    public QuadrilateralInCamera LastQuadState(ScreenIdentificationTag tag)
    {
        QuadrilateralInCamera target = tag switch
        {
            ScreenLocate.ScreenIdentificationTag.Manual => ScreenIdentification.QuadManual,
            ScreenLocate.ScreenIdentificationTag.SemiAuto => ScreenIdentification.QuadSemiAuto,
            ScreenLocate.ScreenIdentificationTag.Auto => ScreenIdentification.QuadAuto,
            _ => null
        };
        return target;
    }
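    // Illustrative usage sketch (comment only, not executed; "locator" is a hypothetical caller-side variable):
    // after a locate pass, a caller could prefer the semi-auto result and fall back to the manual quad:
    //
    //   var locator = ScreenLocate.Main;                                                    // singleton assigned in Awake()
    //   if (!locator.SelectScreenAfterLocate(ScreenLocate.ScreenIdentificationTag.SemiAuto) &&
    //       !locator.SelectScreenAfterLocate(ScreenLocate.ScreenIdentificationTag.Manual))
    //       Debug.LogWarning("No screen result available");                                 // hypothetical handling
    //   var lastSemiAuto = locator.LastQuadState(ScreenLocate.ScreenIdentificationTag.SemiAuto); // null if never recognized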
    /// <summary>
    /// State of the last semi-automatic recognition; the array is null while nothing has been recognized yet.
    /// Each index holds a boolean; false means that edge failed to be recognized (the manual data was applied as a fallback). Indices: 0 = bottom, 1 = right, 2 = top, 3 = left.
    /// </summary>
    public bool[] LastQuadSemiAutoState() => screenIdentification.LastQuadSemiAutoState;
    /// <summary>
    /// Textures output while the algorithm runs: 0 = original image, 1 = all segments found by semi-auto recognition, 2 = screen black/white difference, 3 = recognition result, 4 = screen difference overlaid with the result, 5 = candidate segments during semi-auto
    /// </summary>
    public Texture2D[] OutputTextures => outputTexture2D;
    public Vector2 CameraLocationOffset
    {
        get => infraredLocate.CameraLocationOffset;
        set
        {
            infraredLocate.SetCameraLocationOffset(value);
        }
    }
    public Vector2 UVOffset
    {
        get => infraredLocate.UVOffset;
        set
        {
            infraredLocate.SetUVOffset(value);
        }
    }
    // Records the most recently committed value
    Vector2 OldCameraLocationOffset { get; set; } = new Vector2(0, 0);
    // Records the most recently committed value
    Vector2 OldUVOffset { get; set; } = new Vector2(0, 0);
    #region Dual-point case
    public Vector2[] curCameraLocationOffsets
    {
        get => CameraLocationOffsets;
        set
        {
            // Make sure the array has the expected size
            if (value.Length == 2) // exactly two elements expected
            {
                CameraLocationOffsets = value;
            }
        }
    }
    public Vector2[] curUVOffsets
    {
        get => UVOffsets;
        set
        {
            // Make sure the array has the expected size
            if (value.Length == 2) // exactly two elements expected
            {
                UVOffsets = value;
            }
        }
    }
    // Most recently applied values, as an array; defaults to zero
    Vector2[] CameraLocationOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
    // Most recently applied values, as an array; defaults to zero
    Vector2[] UVOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
    // Most recently committed values, as an array; defaults to zero
    Vector2[] OldCameraLocationOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
    // Most recently committed values, as an array; defaults to zero
    Vector2[] OldUVOffsets { get; set; } = new Vector2[2] { Vector2.zero, Vector2.zero };
    #endregion
    // Whether single-point display is enabled
    public bool bSinglePoint => infraredLocate.bSinglePoint;
    // public InfraredDemo InfraredDemoMain => FindObjectOfType<InfraredDemo>();
    #region UVC handling objects
    //public UVCManager mUVCManager;
    public CameraInfo mUVCCameraInfo;
    public bool getUVCCameraInfo => mUVCCameraInfo != null;
    public Vector2 getUVCCameraInfoSize => getUVCCameraInfo ? mUVCCameraInfo.Size : new Vector2(320, 240);
    private Texture mUVCTexture;
    public Texture getUVCTexture => mUVCTexture;
    public Texture setUVCTexture
    {
        set
        {
            mUVCTexture = value;
        }
    }
    private Texture2D mUVCTexture2D;
    // [SerializeField] Texture2DArray mUVCOutArray;
    #endregion
    public Text Info;
    public List<RectTransform> CrosshairInCamera;
    public List<RectTransform> CrosshairInScreen;
    public RectTransform ScreenQuad;
    public Toggle SaveToggle;
    public Toggle FullScreenToggle;
    public Toggle SingleToggle;
    public LineGenerator UILineGenerator;
    public bool ShowScreenQuad = false;
    // RawImages shown in the demo
    public List<RawImage> outputRawImages;
    readonly Texture2D[] outputTexture2D = new Texture2D[8];
    public RawImage FullScreenImage;
    public PixelCheaker ScreenPixelCheaker;
    public List<Texture2D> DebugScreenImages = new List<Texture2D>();
    public bool DebugOnZIMDemo = false;
    // private SynchronizationContext mainContext;
    public float ReDoLocateCalibrationRatio { get; private set; } // Distance ratio used for calibration during semi-automatic locating; calibrated against the manual result, segments too far from it are discarded
    public void SetCameraSize(Vector size) => cameraSize = size;
    public override Vector CameraSize => cameraSize;
    // CameraSize used by the algorithms; both infrared detection and screen recognition rely on it
    Vector cameraSize;
    bool bIdentifyRed = true; // red device by default
    bool bIdentifyGreen = true;
    #region Performance profiling
    public Text m_UITime;
    const float m_UIUpdateInterval = 0.1f;
    float m_UIUpdateTimer = 0.0f;
    List<float> m_History = new List<float>(100);
    int m_ValidHistoryFrames = 0;
    float m_AverageTime = float.NaN;
    float m_MedianTime = float.NaN;
    float m_MinTime = float.NaN;
    float m_MaxTime = float.NaN;
    public float updateInterval = 0.5F;
    private double lastInterval;
    private int frames = 0;
    private float fps;
    public Text m_FPS;
    #endregion
    #region PC-specific parameters
    // Brightness
    public float pcBrightness { get; set; } = 0.0f;
    // Contrast
    public float pcContrast { get; set; } = 0.0f;
    #endregion
    // Infrared spot detection algorithm
    InfraredLocate infraredLocate;
    // Screen recognition algorithm
    o0.Project.ScreenIdentification screenIdentification;
    public o0.Project.ScreenIdentification ScreenIdentification => screenIdentification;
    RectTransform canvas;
    Mode mode;
    //List<(Vector2 pos, GameObject go)> pointManual = new List<(Vector2, GameObject)>();
    //o0.Project.WebCam o0WebCam = null;
    /// <summary>
    /// Recognition-in-progress state, recorded during automatic recognition
    /// </summary>
    bool bAutomaticRecognition { get; set; } = false; // set while capturing
    bool bAutomaticRecognitionStart { get; set; } = false; // whether to start capturing
    bool bAutomaticRecognitionEnd { get; set; } = false; // whether capturing has finished
    [NonSerialized] public RectTransform BackQuad = null;
    public Material quadMaskMat;
    static public ScreenLocate Main { get; private set; }
    static public void AutoLightPixels(Color[] pixels, int width, int height)
    {
        if (Main.DebugOnZIMDemo)
        {
            var newTex = pixels.zimAutoLightSimple(width, height);
            DebugTexture(7, newTex);
            try
            {
                Main.FullScreenImage.texture = newTex;
            }
            catch { }
        }
    }
    static public void DebugTexture(int index, Texture2D texture)
    {
        LateDestory(Main.outputTexture2D[index]);
        Main.outputTexture2D[index] = texture;
        try
        {
            Main.outputRawImages[index].texture = texture;
        }
        catch { }
    }
    static void LateDestory(UnityEngine.Object o) => Main.StartCoroutine(Main.LateDestoryIEnum(o));
    static public void SetScreen(UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }
    static public void SetScreen(Rect rect, UnityEngine.Color? color = null)
    {
        if (Main.BackQuad == null)
        {
            var canvas = GameObject.Find("WebCameraView").GetComponent<RectTransform>();
            var background = canvas.Find("Background");
            Main.BackQuad = background.GetChild(0).GetComponent<RectTransform>();
        }
        Main.BackQuad.parent.gameObject.SetActive(color != null);
        Main.BackQuad.anchorMin = rect.min;
        Main.BackQuad.anchorMax = rect.max;
        Main.BackQuad.GetComponent<RawImage>().color = color ?? Color.black;
        //Debug.Log("Set Screen " + color.GetColorName());
    }
    static void DebugBackQuad(Rect? rect = null)
    {
        if (Main.BackQuad)
        {
            Main.BackQuad.parent.GetComponent<RawImage>().enabled = false;
            Main.BackQuad.GetComponent<RawImage>().color = Color.white;
            Main.BackQuad.parent.gameObject.SetActive(!Main.BackQuad.parent.gameObject.activeSelf);
            if (rect.HasValue)
            {
                Main.BackQuad.anchorMin = rect.Value.min;
                Main.BackQuad.anchorMax = rect.Value.max;
            }
        }
    }
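    // Illustrative usage sketch of the static screen-fill helpers above (comment only; assumes the
    // "WebCameraView" canvas with a "Background" child exists in the scene, as SetScreen itself does):
    //
    //   ScreenLocate.SetScreen(Color.black);                              // cover the whole screen in black
    //   ScreenLocate.SetScreen(new Rect(0f, 0f, 0.5f, 1f), Color.white);  // fill only the left half
    //   ScreenLocate.SetScreen();                                         // passing no color hides the overlay again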
    //public void ReSizeTexture(int width, int height)
    //{
    //    Debug.Log("Cur mUVCTexture Size: [" + mUVCTexture.width + "," + mUVCTexture.height + "]");
    //    if (mUVCTexture.width < width || mUVCTexture.height < height) // if the current resolution is too small, allocate a new texture
    //    {
    //        Texture2D tex = new Texture2D(
    //            width, height,
    //            TextureFormat.ARGB32,
    //            false, /* mipmap */
    //            true /* linear */);
    //        tex.filterMode = FilterMode.Point;
    //        tex.Apply();
    //        mUVCTexture = tex;
    //        mUVCCameraInfo.previewTexture = tex;
    //        var nativeTexPtr = mUVCCameraInfo.previewTexture.GetNativeTexturePtr();
    //    }
    //}
    void Awake()
    {
        if (Main != null)
            throw new Exception("[ScreenLocate] Multiple instances are not allowed");
        Main = this;
#if !UNITY_EDITOR_WIN
        DebugOnZIMDemo = false;
#endif
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction += UVCIsReady;
        // Create the quad mask material
        quadMaskMat = new Material(Shader.Find("Custom/CameraPointsMaskShader"));
    }
    void OnDestroy()
    {
        //if (mUVCDrawer)
        //    mUVCDrawer.StartPreviewAction -= UVCIsReady;
    }
    void Start()
    {
        //mainContext = SynchronizationContext.Current;
        canvas = transform.GetComponent<RectTransform>();
        mode = Mode.InfraredLocate;
        if (DebugScreenImages.Count != 0 && DebugOnZIMDemo)
        {
            screenIdentification = new o0.Project.ScreenIdentification(this);
            screenIdentification.LocateScreen();
        }
        ReDoLocateCalibrationRatio = 0.125f;
        #region Performance profiling
        for (var i = 0; i < m_History.Capacity; ++i)
        {
            m_History.Add(0.0f);
        }
        lastInterval = Time.realtimeSinceStartup;
        frames = 0;
        #endregion
    }
    // Algorithm initialization
    bool bInitScreenIdentificationAndInfraredLocate = false;
    void AlgorithmInit()
    {
        if (screenIdentification == null)
        {
            screenIdentification = new o0.Project.ScreenIdentification(this);
            Debug.Log("[ScreenLocate] Initializing screen recognition");
            //screenIdentification.OnLocateScreenEnter += OnLocateScreenEnter;
            screenIdentification.OnLocateScreenEnd += OnLocateScreenEnd;
            // Initialize the screen data
            InfraredCameraHelper?.InitScreenLocateManual();
        }
        if (infraredLocate == null)
        {
            infraredLocate = new InfraredLocate(this, screenIdentification.Screen);
            cameraSize = new Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y);
            InfraredSpot.RefreshMinVerifyLength(new o0.Geometry2D.Float.Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y));
            Debug.Log($"[ScreenLocate] Initializing infrared detection, current camera resolution: {CameraSize.x}×{CameraSize.y}, infrared tracking distance: {InfraredSpot.MinVerifyLength}");
            // InfraredDemo initialization
            //float redfilterValue = PlayerPrefs.GetFloat("Init redFilterSliderValue", 0.8f);
            //Debug.Log("Init Red filterValue:" + redfilterValue);
            //infraredLocate.SetBrightnessThreshold(redfilterValue); // the parameter is the infrared brightness threshold; the smaller it is, the dimmer the spots that can be detected; default is 0.93
            // UI related
            if (SingleToggle != null)
            {
                infraredLocate.SetSinglePoint(SingleToggle.isOn);
                SingleToggle.onValueChanged.AddListener((i) =>
                {
                    infraredLocate.SetSinglePoint(i);
                });
            }
        }
        if (screenIdentification != null && infraredLocate != null && !bInitScreenIdentificationAndInfraredLocate)
        {
            InfraredCameraHelper?.InvokeOnScreenLocateIsReady();
            bInitScreenIdentificationAndInfraredLocate = true;
        }
    }
    IEnumerator LateDestoryIEnum(UnityEngine.Object o)
    {
        if (o)
        {
            yield return new WaitForEndOfFrame();
            Destroy(o);
        }
    }
    // Used by the ZIMWebCamera scene
    public void WebCamIsReady(Texture texture)
    {
        mPlatform = Platform.Window;
        mUVCTexture = texture;
        mUVCCameraInfo = new CameraInfo(mUVCTexture);
        brightness = 0;
        // UVC is ready
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }
    /// <summary>
    /// Called when the UVCManager is created and initialized
    /// </summary>
    /// <param name="cameraInfo"></param>
    public void UVCIsReady(CameraInfo cameraInfo)
    {
        mPlatform = Platform.Android;
        mUVCTexture = cameraInfo.previewTexture;
        mUVCCameraInfo = cameraInfo;
        Debug.Log("UVCIsReady:" + mUVCCameraInfo);
        // UVC is ready
        InfraredCameraHelper?.InvokeOnUVCIsReady(mUVCCameraInfo);
    }
    /// <summary>
    /// Fetch the new previewTexture
    /// </summary>
    public void UVCUpdate(bool bChange)
    {
        mUVCTexture = mUVCCameraInfo.previewTexture;
        Debug.Log("[ScreenLocate] UVCUpdate:" + mUVCCameraInfo + ",bChange:" + bChange);
        InfraredCameraHelper?.InvokeOnUVCIsUpdate();
        // Decide here whether to start automatic recognition
        if (bAutomaticRecognitionStart)
        {
            bAutomaticRecognitionStart = false;
            Debug.Log("[ScreenLocate] UVCUpdate starting automatic recognition Capture:" + Capture + " ,Delay: " + Delay);
            screenIdentification.LocateScreen(Capture, Delay);
        }
        if (bAutomaticRecognitionEnd)
        {
            bAutomaticRecognitionEnd = false;
            Debug.Log("[ScreenLocate] UVCUpdate capture finished, current camera resolution: " + mUVCCameraInfo.Size);
            bAutomaticRecognition = false;
        }
    }
    /// <summary>
    /// Refresh quadUnityVectorList after a mode has been selected
    /// </summary>
    public void UpdateQuadUnityVectorList()
    {
        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
        SaveScreenLocateVectorList();
    }
    int brightness = 0;
    /// <summary>
    /// Set the infrared brightness threshold for the algorithm
    /// </summary>
    /// <param name="value"></param>
    public void SetInfraredLocateBrightnessThreshold(float value)
    {
        if (infraredLocate != null)
            infraredLocate.SetBrightnessThreshold(value); // the parameter is the infrared brightness threshold; the smaller it is, the dimmer the spots that can be detected; default is 0.93
    }
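    // Illustrative sketch (comment only; "brightnessSlider" is a hypothetical UI element, not a member of this class):
    // a settings panel could forward a slider value straight to the detector:
    //
    //   brightnessSlider.onValueChanged.AddListener(v =>
    //       ScreenLocate.Main.SetInfraredLocateBrightnessThreshold(v));   // 0..1, default 0.93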
    void Update()
    {
        //++frames;
        //float timeNow = Time.realtimeSinceStartup;
        //if (timeNow > lastInterval + updateInterval)
        //{
        //    fps = (float)(frames / (timeNow - lastInterval));
        //    frames = 0;
        //    lastInterval = timeNow;
        //}
        //if (m_FPS != null)
        //    m_FPS.text = "FPS:" + fps.ToString("f2");
        if (mUVCCameraInfo == null) return;
        AlgorithmInit();
        if (mUVCCameraInfo != null && mUVCCameraInfo.IsPreviewing)
        {
            // Render at the getUVCCameraInfoSize resolution
            CreateUVCTexture2DIfNeeded((int)getUVCCameraInfoSize.x, (int)getUVCCameraInfoSize.y);
            if (!screenIdentification.Update(mUVCTexture2D))
            {
                // Sync the resolution; after a change it also has to be propagated to InfraredDemo
                if (RefreshCameraSize())
                {
                    if (screenIdentification.Screen.QuadInCamera != null)
                    {
                        quadUnityVectorList = screenIdentification.Screen.QuadInCamera.GetUnityVertexNormalizedList();
                        if (!ContainsNaN(quadUnityVectorList))
                        {
                            SaveScreenLocateVectorList();
                            //SyncInfraredDemo();
                            //SyncInfraredScreenPositioningView();
                            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
                            Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed: [" + (int)getUVCCameraInfoSize.x + "," + (int)getUVCCameraInfoSize.y + "]");
                            Debug.Log("[ScreenLocate] RefreshCameraSize screen size changed, refreshing quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
                        }
                        else
                        {
                            Debug.LogError("[ScreenLocate] RefreshCameraSize screen size changed, NaN values present, recalibration needed: " + PrintVector2List(quadUnityVectorList));
                        }
                    }
                    if (DebugOnZIMDemo)
                        Main.ShowScreen(screenIdentification.Screen.QuadInCamera);
                }
                if (CameraSize.x != mUVCTexture2D.width || CameraSize.y != mUVCTexture2D.height)
                {
                    Debug.Log($"<color=red>[ScreenLocate] Resolution mismatch, camera resolution: {getUVCCameraInfoSize}, mUVCTexture2D size: {mUVCTexture2D.width}×{mUVCTexture2D.height}</color>");
                    return;
                }
                // Grab the pixels for the steps below
                var pixels = mUVCTexture2D.GetPixels(); // left to right, bottom to top
                AutoLightPixels(pixels, CameraWidth, CameraHeight);
                InfraredUpdate(pixels);
                if (mode == Mode.ScreenLocateManual)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].CameraLocation != null)
                        {
                            // Spot detected
                            var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, FullScreenImage.rectTransform.rect);
                            CrosshairInCamera[i].gameObject.SetActive(true);
                            CrosshairInCamera[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInCamera[i].gameObject.SetActive(false);
                    }
                }
                else if (mode == Mode.InfraredLocate)
                {
                    if (mPlatform == Platform.Window) // render the points on the UI; can be hidden once in game
                    {
                        for (int i = 0; i < infraredSpotBuffer.Length; i++)
                        {
                            if (infraredSpotBuffer[i].CameraLocation != null)
                            {
                                // Spot detected
                                var posInCanvas = infraredSpotBuffer[i].CameraLocation.Value.pixelToLocalPosition_AnchorCenter(CameraSize, outputRawImages[0].rectTransform.rect);
                                CrosshairInCamera[i].gameObject.SetActive(true);
                                CrosshairInCamera[i].anchoredPosition = posInCanvas;
                            }
                            else
                                CrosshairInCamera[i].gameObject.SetActive(false);
                        }
                    }
                    // On mobile: mPlatform == Platform.Android &&
                    // Common path for mobile and PC
                    if (infraredSpotBuffer.Length > 0)
                    {
                        //int redIndex = 0;
                        //int greenIndex = 1;
                        // Only show the first point (it will flicker if the largest point goes out of bounds)
                        //if (bSinglePoint)
                        //{
                        //    redIndex = 0; // in single-point mode the color can be switched
                        //    if (infraredSpotBuffer[redIndex].ScreenUV != null)
                        //    {
                        //        string str = "Single:";
                        //        Info.text = str + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                        //        //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                        //        onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                        //    }
                        //}
                        //else
                        //{
                        //    // In dual-point mode, select the first point
                        //    if (bIdentifyRed && !bIdentifyGreen)
                        //    {
                        //        if (infraredSpotBuffer[redIndex].ScreenUV != null)
                        //        {
                        //            Info.text = "Red" + redIndex + ":" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                        //            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                        //            onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                        //        }
                        //        else
                        //        {
                        //            Info.text = "Red max point not detected!";
                        //        }
                        //    }
                        //    else if (!bIdentifyRed && bIdentifyGreen)
                        //    {
                        //        if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                        //        {
                        //            Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                        //            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                        //            onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                        //        }
                        //        else
                        //        {
                        //            Info.text = "Green point not detected!";
                        //        }
                        //    }
                        //    else
                        //    {
                        //        // If neither or both colors are selected, detect both points
                        //        // Auto-switch: spot detected
                        //        if (infraredSpotBuffer[redIndex].ScreenUV != null)
                        //        {
                        //            Info.text = "Red:" + infraredSpotBuffer[redIndex].ScreenUV.Value.ToString("F4");
                        //            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[redIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[redIndex].ScreenUV.Value.y * Screen.height, 0));
                        //            onFilterPos2(infraredSpotBuffer[redIndex].ScreenUV.Value, redIndex);
                        //        }
                        //        else if (infraredSpotBuffer[greenIndex].ScreenUV != null)
                        //        {
                        //            Info.text = "Green:" + infraredSpotBuffer[greenIndex].ScreenUV.Value.ToString("F4");
                        //            //InfraredManager.ConnetDevicesSingle.ins.posAction?.Invoke(new Vector3(infraredSpotBuffer[greenIndex].ScreenUV.Value.x * Screen.width, infraredSpotBuffer[greenIndex].ScreenUV.Value.y * Screen.height, 0));
                        //            onFilterPos2(infraredSpotBuffer[greenIndex].ScreenUV.Value, greenIndex);
                        //        }
                        //        else
                        //        {
                        //            Info.text = "No point detected!";
                        //        }
                        //    }
                        //}
                        // Only show the first point (it will flicker if the largest point goes out of bounds)
                        if (bSinglePoint)
                        {
                            int redIndex = 0; // in single-point mode the color can be switched
                            if (infraredSpotBuffer[redIndex].ScreenUV != null)
                            {
                                onFilterPos(infraredSpotBuffer[redIndex].ScreenUV.Value);
                                onCameraLocationToUVOffset(infraredSpotBuffer[redIndex].CameraLocation.Value);
                                // Single point uses the SDK-managed variables
                                InfraredCameraHelper?.InvokeOnPositionUpdate(targetPos, targetCameraLocation);
                            }
                        }
                        else
                        {
                            // Notify changes of both points at once
                            if (infraredSpotBuffer[0].ScreenUV != null) onFilterPos2(infraredSpotBuffer[0].ScreenUV.Value, 0);
                            if (infraredSpotBuffer[1].ScreenUV != null) onFilterPos2(infraredSpotBuffer[1].ScreenUV.Value, 1);
                            // Pass along a cameraLocation
                            if (infraredSpotBuffer[0].CameraLocation.HasValue) onCameraLocationToUVOffset(infraredSpotBuffer[0].CameraLocation.Value, 0);
                            if (infraredSpotBuffer[1].CameraLocation.HasValue) onCameraLocationToUVOffset(infraredSpotBuffer[1].CameraLocation.Value, 1);
                            // Send both points' updated coordinates, together with the unchanged data
                            InfraredCameraHelper?.InvokeOnPositionUpdate2(_targetPoints2, _targetCameraLocationPoints2);
                        }
                    }
                }
                else if (mode == Mode.ScreenMap && DebugOnZIMDemo)
                {
                    for (int i = 0; i < infraredSpotBuffer.Length; i++)
                    {
                        if (infraredSpotBuffer[i].ScreenUV != null)
                        {
                            // Spot detected
                            var posInCanvas = infraredSpotBuffer[i].ScreenUV.Value.pixelToLocalPosition_AnchorCenter(new Vector2(1, 1), canvas.rect);
                            CrosshairInScreen[i].gameObject.SetActive(true);
                            CrosshairInScreen[i].anchoredPosition = posInCanvas;
                        }
                        else
                            CrosshairInScreen[i].gameObject.SetActive(false);
                    }
                    if (Input.GetKeyDown(KeyCode.Escape))
                        ToMode(Mode.InfraredLocate);
                }
            }
        }
        //var t1 = Time.realtimeSinceStartup;
        //var dt = t1 - t0;
        //m_History[m_ValidHistoryFrames % m_History.Count] = dt;
        //++m_ValidHistoryFrames;
        //m_UIUpdateTimer += Time.deltaTime;
        //if (m_UIUpdateTimer >= m_UIUpdateInterval)
        //{
        //    m_UIUpdateTimer = 0.0f;
        //    if (m_ValidHistoryFrames >= m_History.Count)
        //    {
        //        m_ValidHistoryFrames = 0;
        //        m_AverageTime = 0.0f;
        //        m_MinTime = float.PositiveInfinity;
        //        m_MaxTime = float.NegativeInfinity;
        //        {
        //            for (var i = 0; i < m_History.Count; i++)
        //            {
        //                var time = m_History[i];
        //                m_AverageTime += time;
        //                m_MinTime = Mathf.Min(m_MinTime, time);
        //                m_MaxTime = Mathf.Max(m_MaxTime, time);
        //            }
        //            m_AverageTime /= m_History.Count;
        //        }
        //        {
        //            m_History.Sort();
        //            // Odd-length history?
        //            if ((m_History.Count & 1) != 0)
        //            {
        //                m_MedianTime = m_History[m_History.Count / 2];
        //            }
        //            else
        //            {
        //                m_MedianTime = (m_History[m_History.Count / 2] + m_History[m_History.Count / 2 - 1]) / 2.0f;
        //            }
        //        }
        //    }
        //    var statistics = $"{m_History.Count} frame samples:\naverage: {m_AverageTime * 1000.0f:F2}ms\nmedian: {m_MedianTime * 1000.0f:F2}ms\nmin: {m_MinTime * 1000.0f:F2}ms\nmax: {m_MaxTime * 1000.0f:F2}ms\n";
        //    //Method: {m_Method} {UnityEngine.SceneManagement.SceneManager.GetActiveScene().name} |
        //    if (m_UITime != null)
        //        m_UITime.text = $"Cam: {mUVCCameraInfo.CurrentWidth}x{mUVCCameraInfo.CurrentHeight}{(mUVCTexture2D ? ",T2D:" : "")}{(mUVCTexture2D ? mUVCTexture2D.width + "x" : "")}{(mUVCTexture2D ? mUVCTexture2D.height : "")} \nLast Frame: {dt * 1000.0f:F2}ms \n{statistics}";
        //}
        //UpdateInputs();
        if (DebugOnZIMDemo)
        {
            if (Input.GetKeyDown(KeyCode.Z))
                SelectScreenAfterLocate(ScreenIdentificationTag.Manual);
            if (Input.GetKeyDown(KeyCode.X))
                SelectScreenAfterLocate(ScreenIdentificationTag.SemiAuto);
            if (Input.GetKeyDown(KeyCode.C))
                SelectScreenAfterLocate(ScreenIdentificationTag.Auto);
        }
    }
    public void InfraredUpdate(Color[] cameraPixels)
    {
        infraredLocate.InfraredUpdate(cameraPixels);
        if (DebugOnZIMDemo)
            DebugPixelSpotArea(infraredLocate.DebugAreas);
    }
    private bool RefreshCameraSize()
    {
        var sizeNew = new Vector(getUVCCameraInfoSize.x, getUVCCameraInfoSize.y);
        var sizeNewFloat = getUVCCameraInfoSize.o0Vector();
        if (sizeNew != CameraSize || (screenIdentification?.Screen?.QuadInCamera != null && sizeNewFloat != screenIdentification.Screen.QuadInCamera.CameraSize))
        {
            Debug.Log($"<color=aqua>[ScreenLocate] Resolution changed, refreshing (from {CameraSize.x}×{CameraSize.y} to {sizeNew.x}×{sizeNew.y}), has screen data: {screenIdentification.Screen.QuadInCamera != null}, has manual data: {screenIdentification.QuadManual != null}</color>");
            // Sync the camera resolution
            cameraSize = sizeNew;
            screenIdentification.Screen.RefreshCameraSize(sizeNewFloat);
            screenIdentification.QuadAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            screenIdentification.QuadManual?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            screenIdentification.QuadSemiAuto?.ReSize(sizeNewFloat, ScreenMap.ViewAspectRatioSetting);
            InfraredSpot.RefreshMinVerifyLength(sizeNewFloat);
            return true;
        }
        return false;
    }
    #region Single-point vs dual-point handling; dual-point values are computed and stored outside the SDK
    // Single point uses the SDK variables
    Vector2 targetPos = Vector2.zero;
    Vector2 targetCameraLocation = Vector2.zero;
    // Dead zone (in screen pixels) below which position updates are ignored, to filter jitter
    public float filterDis = 3.0f;
    void onFilterPos(Vector2 _vector2Pos)
    {
        // Apply an offset so the final crosshair output points at the exact center
        Vector2 np = new Vector2((_vector2Pos.x - UVOffset.x) * Screen.width, (_vector2Pos.y - UVOffset.y) * Screen.height); //_vector2Pos.pixelToLocalPosition_AnchorCenter(Vector2.one, (transform as RectTransform).rect);
        if (Vector2.Distance(np, targetPos) >= filterDis)
        {
            targetPos = np;
        }
    }
    void onCameraLocationToUVOffset(Vector2 _vector2Pos)
    {
        Vector2 np = new Vector2(_vector2Pos.x - CameraLocationOffset.x, _vector2Pos.y - CameraLocationOffset.y);
        targetCameraLocation = np;
    }
    // Dual point is handled by local code
    Vector2[] _targetPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
    void onFilterPos2(Vector2 _vector2Pos, int index)
    {
        Vector2 np = new Vector2((_vector2Pos.x - UVOffsets[index].x) * Screen.width, (_vector2Pos.y - UVOffsets[index].y) * Screen.height);
        if (Vector2.Distance(np, _targetPoints2[index]) >= filterDis)
        {
            _targetPoints2[index] = np;
        }
    }
    Vector2[] _targetCameraLocationPoints2 = new Vector2[] { Vector2.zero, Vector2.zero };
    void onCameraLocationToUVOffset(Vector2 _vector2Pos, int index)
    {
        Vector2 np = new Vector2(_vector2Pos.x - CameraLocationOffsets[index].x, _vector2Pos.y - CameraLocationOffsets[index].y);
        _targetCameraLocationPoints2[index] = np;
    }
    #endregion
    public void DebugPixelSpotArea(List<PixelSpotArea> areas)
    {
        if (areas != null)
        {
            Info.transform.GetChild(0).GetComponent<Text>().text = $"areas.Count: {areas.Count}";
            PixelSpotArea a0 = null; // the area with the largest radius
            PixelSpotArea a1 = null; // the area with the second-largest radius
            foreach (var a in areas)
            {
                if (a0 == null || a.Radius > a0.Radius)
                {
                    a1 = a0; // previous largest becomes second-largest
                    a0 = a;  // current becomes the largest
                }
                else if (a1 == null || a.Radius > a1.Radius)
                {
                    a1 = a; // update the second-largest
                }
            }
            Texture2D texture = new Texture2D(CameraWidth, CameraHeight);
            Color[] blackPixels = new Color[texture.width * texture.height];
            for (int i = 0; i < blackPixels.Length; i++)
                blackPixels[i] = Color.black;
            texture.SetPixels(blackPixels);
            if (a0 != null)
            {
                foreach (var p in a0.Pixels0)
                    texture.SetPixel((int)p.x, (int)p.y, Color.yellow);
                foreach (var p in a0.Pixels1)
                    texture.SetPixel((int)p.x, (int)p.y, Color.white);
            }
            if (a1 != null)
            {
                foreach (var p in a1.Pixels0)
                    texture.SetPixel((int)p.x, (int)p.y, Color.green);
                foreach (var p in a1.Pixels1)
                    texture.SetPixel((int)p.x, (int)p.y, Color.blue);
            }
            texture.Apply();
            DebugTexture(6, texture);
        }
    }
    #region Automatic recognition
    int Capture = 30;
    int Delay = 30;
    Vector2 EnterResolution;
    // int DefaultResolutionIndex;
    // readonly public int HighScreenLocateResolutionIndex = 2; // during automatic recognition the camera resolution is fixed at 1280 * 720 (index 2)
    public void BtnScreenLocate()
    {
        if (DebugScreenImages.Count != 0)
        {
            //screenIdentification = new o0.Project.ScreenIdentification();
            cameraSize = new Vector(DebugScreenImages[0].width, DebugScreenImages[0].height);
            WebCamIsReady(DebugScreenImages[0]);
            CreateUVCTexture2DIfNeeded();
        }
        //Debug.Log("BtnScreenLocate Capture:" + Capture + " ,Delay: " + Delay);
        //screenIdentification.LocateScreen(Capture, Delay);
        OnLocateScreenEnter();
    }
    // bool log1 = false, log2 = false;
    public void OnLocateScreenEnter()
    {
        bAutomaticRecognition = true;
        bAutomaticRecognitionStart = true;
        ResetScreenIdentification();
        //DefaultResolutionIndex = InfraredDemoMain?.ResolutionIndex ?? 0; // record the resolution before entering (the game-scene resolution, lower than during recognition)
        //HighScreenLocateResolutionIndex = InfraredDemoMain.getTextureToResolutionNewIndex(); // index = 0
        // Debug.Log("[ScreenLocate] start capture DefaultResolutionIndex:" + DefaultResolutionIndex + " ,HighScreenLocateResolutionIndex:" + HighScreenLocateResolutionIndex);
        // InfraredDemoMain?.SetResolutionNew(HighScreenLocateResolutionIndex);
        EnterResolution = mUVCCameraInfo.Size; // record the resolution before entering (the game-scene resolution, lower than during recognition)
        Vector2 _HighResolution = mUVCCameraInfo.CurrentCalibrationResolution; // the highest resolution
        Resize((int)_HighResolution.x, (int)_HighResolution.y);
        //CreateUVCTexture2DIfNeeded();
        // log1 = true;
        // log2 = true;
        screenIdentification.LocateScreen(); // entry point of automatic recognition
        if (DebugOnZIMDemo)
        {
            var webCam = GetComponent<ZIMWebCamera>();
            webCam.AdjustResolution(1920, 1080);
            mUVCCameraInfo.SetSize(webCam.webCamTexture.width, webCam.webCamTexture.height);
        }
    }
    /// <summary>
    /// Screen recognition finished
    /// </summary>
    public void OnLocateScreenEnd()
    {
        bAutomaticRecognitionEnd = true;
        // Resolution handling for this screen recognition (recognition currently runs at high resolution; switch back to the lower one when it ends)
        //InfraredDemoMain?.SetResolutionNew(DefaultResolutionIndex);
        Resize((int)EnterResolution.x, (int)EnterResolution.y);
        if (DebugOnZIMDemo)
        {
            var webCam = GetComponent<ZIMWebCamera>();
            webCam.AdjustResolution((int)EnterResolution.x, (int)EnterResolution.y);
            mUVCCameraInfo.SetSize(webCam.webCamTexture.width, webCam.webCamTexture.height);
        }
        // If the earlier ContainsNaN(quadUnityVectorList) check did not catch it, check again here
        // After recognition ends, the semi-auto data also has to be checked, i.e. P5?
        if (screenIdentification.QuadAuto == null && screenIdentification.QuadSemiAuto == null)
        {
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(new List<Vector2>());
            Debug.LogError("[ScreenLocate] OnLocateScreenEnd screen not recognized");
        }
    }
    /**
     * Change the camera's actual resolution
     */
    public void Resize(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
#if UNITY_ANDROID || UNITY_IOS
        // Send the command that changes the camera's actual resolution
        mUVCCameraInfo.SetCameraSize(width, height);
#endif
#if UNITY_STANDALONE_WIN
        // PC: TODO decide how to handle this
        // ResizePC(width, height);
#endif
        //mUVCCameraInfo.SetSize(width, height); // manually record the resolution; this may be problematic because width and height are the requested resolution, not the camera's actual current one
        Debug.Log($"[ScreenLocate] Changing resolution mUVCCameraInfo origin:[{mUVCCameraInfo.CurrentWidth},{mUVCCameraInfo.CurrentHeight}]=>target:[{width},{height}]");
        // if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
    }
    /// <summary>
    /// Change the resolution on PC
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    public void ResizePC(int width, int height)
    {
        if (mUVCCameraInfo == null) return;
        //if (screenIdentification.isInitLocateScreen()) screenIdentification.bStartLocateScreen = true;
        // PcWebCamera pcWebCamera = GetComponent<PcWebCamera>();
        // if(pcWebCamera.webCamTexture == null || !pcWebCamera.webCamTexture.isPlaying) return;
        //StartCoroutine(ResetWebCam(pcWebCamera, width, height));
        mUVCCameraInfo.SetSize(width, height); // manually record the resolution; this may be problematic because width and height are the requested resolution, not the camera's actual current one
        Debug.Log("[ScreenLocate] Resize mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
    }
    private System.Collections.IEnumerator ResetWebCam(PcWebCamera pcWebCamera, int newWidth, int newHeight)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        // Stop the current WebCamTexture
        _webCamTexture.Stop();
        // Trigger OnWebCamStopped event
        // OnWebCamStopped?.Invoke();
        // Wait for a short time to ensure resources are released
        yield return new WaitForSeconds(0.5f);
        // Create a new WebCamTexture with the new dimensions
        _webCamTexture = new WebCamTexture(newWidth, newHeight);
        pcWebCamera.webCamTexture = _webCamTexture;
        mUVCTexture = _webCamTexture;
        // Restart the camera
        yield return StartCoroutine(StartWebCam(pcWebCamera));
    }
    private System.Collections.IEnumerator StartWebCam(PcWebCamera pcWebCamera)
    {
        WebCamTexture _webCamTexture = pcWebCamera.webCamTexture;
        _webCamTexture.Play();
        // Wait until the WebCamTexture is playing
        while (!_webCamTexture.isPlaying)
        {
            yield return null;
        }
        // Trigger OnWebCamStarted event
        //OnWebCamStarted?.Invoke();
        mUVCCameraInfo.SetSize(_webCamTexture.width, _webCamTexture.height); // manually record the resolution actually reported by the texture
        Debug.Log("[ScreenLocate] ResizePc mUVCCameraInfo.SetSize: [" + mUVCCameraInfo.CurrentWidth + "," + mUVCCameraInfo.CurrentHeight + "]");
        // if(screenIdentification.isInitLocateScreen())screenIdentification.bStartLocateScreen = true;
    }
    #endregion
    public void BtnScreenMap()
    {
        ToMode(Mode.ScreenMap);
    }
    // Enter manual screen locating
    public void BtnScreenLocateManual()
    {
        ToMode(Mode.ScreenLocateManual);
    }
    // Reset the screen recognition data
    public void ResetScreenIdentification()
    {
        screenIdentification.Screen.Active = false;
    }
    // threshold ranges from 0 to 1: 0 means nearest, 1 means farthest
    public void SetReDoLocateCalibrationRatio(float threshold)
    {
        const float MIN = 0.02f;
        const float MAX = 0.32f;
        ReDoLocateCalibrationRatio = MIN + (MAX - MIN) * threshold;
    }
    /// <summary>
    /// Fixed vertex order: lower-left, lower-right, upper-left, upper-right
    /// </summary>
    public static List<Vector2> quadUnityVectorList = new();
    /// <summary>
    /// Format the quad for logging
    /// </summary>
    /// <param name="list">lower-left, lower-right, upper-left, upper-right</param>
    /// <returns></returns>
    public string PrintVector2List(List<Vector2> list)
    {
        if (screenIdentification == null || !screenIdentification.Screen.Active) return "[]";
        string result = "";
        if (list.Count == 4)
        {
            result = "lower-left" + list[0].ToString() + ", lower-right" + list[1].ToString() + ", upper-left" + list[2].ToString() + ", upper-right" + list[3].ToString();
        }
        else
        {
            result = "count != 4 error";
        }
        //foreach (Vector2 vector in list)
        //{
        //    result += vector.ToString() + " ";
        //}
        //Debug.Log(result);
        return result;
    }
    /// <summary>
    /// Check whether any component is NaN
    /// </summary>
    /// <param name="vectors"></param>
    /// <returns></returns>
    public bool ContainsNaN(List<Vector2> vectors)
    {
        foreach (var v in vectors)
        {
            if (float.IsNaN(v.x) || float.IsNaN(v.y))
            {
                return true;
            }
        }
        return false;
    }
    // Mark the four screen corners; ScreenQuadObject has four child nodes used as markers
    public void ShowScreen(RectTransform ScreenQuadObject, QuadrilateralInCamera screen)
    {
        if (screen == null)
        {
            Info.text = "Screen recognition failed";
            return;
        }
        Info.text = "Screen recognized";
        //if (ScreenQuadObject && ScreenQuadObject.childCount >= 4)
        //{
        //    ScreenQuadObject.gameObject.SetActive(true);
        //    for (int i = 0; i < 4; i++)
        //    {
        //        if (DebugOnZIMDemo)
        //        {
        //            RectTransform t = ScreenQuadObject.GetChild(i) as RectTransform;
        //            t.anchoredPosition = screen.Quad[i].pixelToLocalPosition_AnchorCenter(screen.CameraSize, ScreenQuadObject.rect);
        //        }
        //    }
        //}
        quadUnityVectorList = screen.GetUnityVertexNormalizedList(); // record the four points
        if (!ContainsNaN(quadUnityVectorList))
        {
            SaveScreenLocateVectorList();
            //SyncInfraredDemo();
            if (DebugOnZIMDemo)
                SyncInfraredScreenPositioningView();
            InfraredCameraHelper?.InvokeOnUVCPosUpdate(quadUnityVectorList);
            Debug.Log("[ScreenLocate] ShowScreen screen recognized, updating quadUnityVectorList: " + PrintVector2List(quadUnityVectorList));
        }
        else
        {
            Debug.LogError("[ScreenLocate] ShowScreen NaN values present, recalibration needed: " + PrintVector2List(quadUnityVectorList));
        }
    }
    public void ShowScreen(QuadrilateralInCamera screen) => ShowScreen(ScreenQuad, screen);
    /// <summary>
    /// Save the calibration point positions locally
    /// </summary>
    static public void SaveScreenLocateVectorList()
    {
        //string saveStr = string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")); //,{v.z}
        // If the list is empty, save an empty string (or a custom marker)
        string saveStr = quadUnityVectorList.Count > 0 ? string.Join(";", quadUnityVectorList.Select(v => $"{v.x},{v.y}")) : "";
        Debug.Log("SaveScreenLocateVectorList: " + saveStr);
        PlayerPrefs.SetString("ScreenLocateVectorList", saveStr);
    }
    /// <summary>
    /// Load the locally stored calibration point positions
    /// </summary>
    static public bool GetScreenLocateVectorList()
    {
        string posListStr = PlayerPrefs.GetString("ScreenLocateVectorList", "");
        Debug.Log("GetScreenLocateVectorList:" + posListStr);
        if (!string.IsNullOrWhiteSpace(posListStr))
        {
            quadUnityVectorList.Clear();
            quadUnityVectorList = posListStr.Split(';')
                .Select(s =>
                {
                    string[] parts = s.Split(',');
                    return new Vector2(float.Parse(parts[0]), float.Parse(parts[1]));
                })
                .ToList();
            return true;
        }
        else return false;
    }
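    // Storage format note: SaveScreenLocateVectorList/GetScreenLocateVectorList round-trip the quad through
    // the "ScreenLocateVectorList" PlayerPrefs key as "x,y" pairs joined by ';' in the fixed vertex order
    // (lower-left, lower-right, upper-left, upper-right). An illustrative value (example coordinates only):
    //
    //   "0.1,0.1;0.9,0.1;0.1,0.9;0.9,0.9"
    //
    // The parser above assumes exactly this "x,y;x,y;..." layout; float.Parse is not guarded, so malformed
    // input will throw.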
    public void SetSinglePoint(bool value) => infraredLocate.SetSinglePoint(value);
    public bool IsSinglePoint() => infraredLocate.IsSinglePoint();
    public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation") => infraredLocate.GetCenterOffset(inputPoint, type);
    /// <summary>
    /// Reset the offsets
    /// </summary>
    public void ResetPointsOffest() => infraredLocate.ResetCenterOffset();
    /// <summary>
    /// Initialize the recorded values
    /// </summary>
    /// <param name="inputPointOffset"></param>
    /// <param name="type"></param>
    /// <returns></returns>
    public Vector2 SetPointsOffset(Vector2 inputPointOffset, string type = "CameraLocation")
    {
        // Record the offset from the original center to the input point
        if (type == "CameraLocation")
        {
            OldCameraLocationOffset = CameraLocationOffset = inputPointOffset;
            return CameraLocationOffset;
        }
        else
        {
            //ScreenUV
            OldUVOffset = UVOffset = inputPointOffset;
            return UVOffset;
        }
    }
    /// <summary>
    /// Undo the last change
    /// </summary>
    public void RevokePointsOffest()
    {
        CameraLocationOffset = OldCameraLocationOffset;
        UVOffset = OldUVOffset;
    }
    /// <summary>
    /// Compute the cameraLocatoin position after applying the offset
    /// </summary>
    /// <param name="cameraLocatoin"></param>
    /// <returns></returns>
    public Vector2 GetOffsetCameraLocation(Vector2 cameraLocatoin)
    {
        return cameraLocatoin - CameraLocationOffset;
    }
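    // Illustrative single-point calibration sketch (comment only; "aimedPoint" is a hypothetical camera-space
    // sample captured while the user aims at the screen center). One plausible flow, given that SetPointsOffset
    // also records the baseline used by RevokePointsOffest:
    //
    //   // live-adjust while the user calibrates:
    //   ScreenLocate.Main.CameraLocationOffset = ScreenLocate.Main.AdjustPointsOffset(aimedPoint, "CameraLocation");
    //   // user cancels: roll back to the last committed value
    //   ScreenLocate.Main.RevokePointsOffest();
    //   // user confirms: commit the new value as the baseline
    //   ScreenLocate.Main.SetPointsOffset(ScreenLocate.Main.CameraLocationOffset, "CameraLocation");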
    #region Dual-point operations
    /// <summary>
    /// Set the recorded position (dual-point)
    /// </summary>
    /// <param name="playerType"></param>
    /// <param name="inputPointOffset"></param>
    /// <param name="type"></param>
    /// <returns></returns>
    public Vector2[] SetPointsOffsets(PlayerType playerType, Vector2 inputPointOffset, string type = "CameraLocation")
    {
        int index = 0;
        if (playerType == PlayerType.FirstPlayer)
        {
            index = 0;
        }
        else
        {
            index = 1;
        }
        // Record the offset from the original center to the input point
        if (type == "CameraLocation")
        {
            OldCameraLocationOffsets[index] = CameraLocationOffsets[index] = inputPointOffset;
            return CameraLocationOffsets;
        }
        else
        {
            //ScreenUV
            OldUVOffsets[index] = UVOffsets[index] = inputPointOffset;
            return UVOffsets;
        }
    }
    /// <summary>
    /// Undo the last change (dual-point)
    /// </summary>
    public void RevokePointsOffests()
    {
        CameraLocationOffsets = OldCameraLocationOffsets;
        UVOffsets = OldUVOffsets;
    }
    public void ResetPointsOffests()
    {
        // Reset both players' offsets for both coordinate types
        SetPointsOffsets(PlayerType.FirstPlayer, Vector2.zero, "CameraLocation");
        SetPointsOffsets(PlayerType.FirstPlayer, Vector2.zero, "ScreenUV");
        SetPointsOffsets(PlayerType.SecondPlayer, Vector2.zero, "CameraLocation");
        SetPointsOffsets(PlayerType.SecondPlayer, Vector2.zero, "ScreenUV");
    }
    /// <summary>
    /// Called when calibrating the center point of the left/right screen half
    /// </summary>
    /// <param name="inputPoint"></param>
    /// <param name="type"></param>
    /// <param name="isLeftScreen"></param>
    /// <returns></returns>
    public Vector2 AdjustPointsOffset(Vector2 inputPoint, string type = "CameraLocation", bool isLeftScreen = true)
    {
        Vector2 offset;
        if (type == "CameraLocation")
        {
            // CameraLocation offset, taking the left/right screen half into account
            Vector2 center = screenIdentification.Screen.TransformToCamera(
                new Vector2(isLeftScreen ? 0.25f : 0.75f, 0.5f) * screenIdentification.Screen.UVSize
            );
            offset = inputPoint - center; // offset = input point - center point
        }
        else
        {
            // Offset in UV space, taking the left/right screen half into account
            Vector2 center = new Vector2(isLeftScreen ? 0.25f : 0.75f, 0.5f); // center of the left or right half
            offset = inputPoint - center; // offset = input point - center point
        }
        return offset;
    }
    #endregion
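    // Illustrative dual-point sketch (comment only; "p1Aim" is a hypothetical camera-space sample for the
    // first player aiming at the center of the left screen half):
    //
    //   var offset = ScreenLocate.Main.AdjustPointsOffset(p1Aim, "CameraLocation", isLeftScreen: true);
    //   ScreenLocate.Main.SetPointsOffsets(PlayerType.FirstPlayer, offset, "CameraLocation");
    //   // undo restores both players' last committed offsets:
    //   ScreenLocate.Main.RevokePointsOffests();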
    void ToMode(Mode mode)
    {
        if (this.mode == mode)
            return;
        if (mode == Mode.ScreenMap)
        {
            if (!screenIdentification.Screen.Active)
            {
                Info.text = "Locate the screen first";
                return;
            }
            Info.text = "Press ESC to exit";
            SetScreen(Color.black);
            //Info.transform.SetAsLastSibling();
            this.mode = Mode.ScreenMap;
        }
        else if (mode == Mode.InfraredLocate)
        {
            Info.text = screenIdentification.Screen.Active ? "Screen located" : "Screen locating failed";
            //Info.text = "Screen recognized";
            SetScreen(null);
            foreach (var i in CrosshairInScreen)
                i.gameObject.SetActive(false);
            FullScreenImage.gameObject.SetActive(false);
            ScreenPixelCheaker?.HideImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 4);
            this.mode = Mode.InfraredLocate;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} Mode.InfraredLocate: screen recognized: {screenIdentification.Screen.Active}");
#endif
        }
        else if (mode == Mode.ScreenLocateManual)
        {
            Info.text = "Left-click the lower-left corner of the screen";
            FullScreenImage.gameObject.SetActive(true);
            ScreenPixelCheaker?.ShowImage();
            //Info.transform.SetSiblingIndex(transform.childCount - 1);
            // var newTex = WebCamera.webCamTexture.AutoLight(10);
            //DebugTexture(1, TextureToTexture2D(rawImage.texture));
            CreateUVCTexture2DIfNeeded();
            DebugTexture(7, mUVCTexture2D.zimAutoLight(brightness));
            //mUVCTexture2DTemp = TextureToTexture2D(mUVCCameraInfo.previewTexture);
            //DebugTexture(6, mUVCTexture2DTemp.zimAutoLight(brightness));
            this.mode = Mode.ScreenLocateManual;
        }
    }
    private Texture2D TextureToTexture2D(Texture texture, int width = 0, int height = 0)
    {
        if (width == 0)
            width = texture.width;
        if (height == 0)
            height = texture.height;
        Texture2D _texture2D = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture renderTexture = RenderTexture.GetTemporary(
            width,
            height,
            0,
            RenderTextureFormat.ARGB32,
            RenderTextureReadWrite.Linear);
        Graphics.Blit(texture, renderTexture);
        RenderTexture.active = renderTexture;
        _texture2D.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        _texture2D.Apply();
        RenderTexture.active = currentRT;
        RenderTexture.ReleaseTemporary(renderTexture);
        return _texture2D;
    }
    /// <summary>
    /// Build a texture with black borders: the area outside the recognized quad is filled with black, handled by a shader
    /// </summary>
    /// <param name="inputTexture"></param>
    /// <param name="shaderMat"></param>
    /// <param name="quadUV"></param>
    /// <param name="width"></param>
    /// <param name="height"></param>
    /// <returns></returns>
    private Texture2D TextureToTexture2DWithShader(Texture inputTexture, Material shaderMat, List<Vector2> quadUV, int width = 0, int height = 0)
    {
        if (width == 0) width = inputTexture.width;
        if (height == 0) height = inputTexture.height;
        // Set the four clipping points (passed to the shader)
        shaderMat.SetVector("_QuadP0", quadUV[0]);
        shaderMat.SetVector("_QuadP1", quadUV[1]);
        shaderMat.SetVector("_QuadP2", quadUV[2]);
        shaderMat.SetVector("_QuadP3", quadUV[3]);
        // Create a temporary RT
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture tempRT = RenderTexture.GetTemporary(
            width, height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        // Blit with the shader applied
        Graphics.Blit(inputTexture, tempRT, shaderMat);
        // Read the pixels from the RT into a Texture2D
        Texture2D resultTex = new Texture2D(width, height, TextureFormat.ARGB32, false, true);
        RenderTexture.active = tempRT;
        resultTex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        resultTex.Apply();
        // Clean up
        RenderTexture.active = currentRT;
        RenderTexture.ReleaseTemporary(tempRT);
        return resultTex;
    }
    //public void CreateUVCTexture2DFocusSizeIfNeeded(int width, int height)
    //{
    //    if (mUVCTexture2D != null)
    //        Destroy(mUVCTexture2D);
    //    mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    //}
    /// <summary>
    /// Use the default mUVCTexture width/height
    /// </summary>
    private void CreateUVCTexture2DIfNeeded()
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        mUVCTexture2D = TextureToTexture2D(mUVCTexture);
    }
    /// <summary>
    /// Rebuild mUVCTexture2D with the given width/height
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    private void CreateUVCTexture2DIfNeeded(int width = 0, int height = 0)
    {
        if (mUVCTexture2D != null)
            Destroy(mUVCTexture2D);
        // Get the quad
        var quadInCamera = screenIdentification?.Screen?.QuadInCamera;
        if (quadInCamera != null)
        {
            List<Vector2> quad = quadInCamera.GetUnityVertexNormalizedList();
            if (quad != null && quad.Count == 4)
            {
                // Original order: lower-left, lower-right, upper-left, upper-right
                mUVCTexture2D = TextureToTexture2DWithShader(mUVCTexture, quadMaskMat, quad);
                // Show the texture for testing
                InfraredDemo._ins.MyCameraRender6.texture = mUVCTexture2D;
                return;
            }
        }
        // If the quad is invalid or missing, just copy the original texture
        mUVCTexture2D = TextureToTexture2D(mUVCTexture, width, height);
    }
    #region DoubleButton
    private DateTime m_firstTime;
    private DateTime m_secondTime;
    private void Press()
    {
        Debug.Log("Entering manual locating");
        BtnScreenLocateManual();
        resetTime();
    }
    public void OnDoubleClick()
    {
        // Reset on timeout
        if (!m_firstTime.Equals(default(DateTime)))
        {
            var intervalTime = DateTime.Now - m_firstTime;
            float milliSeconds = (float)intervalTime.TotalMilliseconds;
            if (milliSeconds >= 400)
                resetTime();
        }
        // Record the timestamps of the two presses
        if (m_firstTime.Equals(default(DateTime)))
            m_firstTime = DateTime.Now;
        else
            m_secondTime = DateTime.Now;
        // Triggered on the second click, when the interval is under 400 ms
        if (!m_firstTime.Equals(default(DateTime)) && !m_secondTime.Equals(default(DateTime)))
        {
            var intervalTime = m_secondTime - m_firstTime;
            float milliSeconds = (float)intervalTime.TotalMilliseconds;
            if (milliSeconds < 400)
                Press();
            else
                resetTime();
        }
    }
    private void resetTime()
    {
        m_firstTime = default(DateTime);
        m_secondTime = default(DateTime);
    }
    #endregion
    #region Performance profiling
    void InvalidateTimings()
    {
        m_ValidHistoryFrames = 0;
        m_AverageTime = float.NaN;
        m_MedianTime = float.NaN;
        m_MinTime = float.NaN;
        m_MaxTime = float.NaN;
    }
    void UpdateInputs()
    {
        // Reset
        if (Input.GetKeyDown(KeyCode.UpArrow))
        {
            InvalidateTimings();
        }
    }
    #endregion
}