// ScreenIdentification.cs

#define ENABLE_LOG
using o0.Geometry2D.Float;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using UnityEngine;
using UnityStandardAssets.ImageEffects;
using ZIM;
using ZIM.Unity;
namespace o0.Project
{
    public partial class ScreenIdentification
    {
        private const string TAG = "ScreenIdentification#";
        //static Rect[][] LocateAreaData = new Rect[][] {
        //    new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0.4f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0f, 0.4f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f), new Rect(0.4f, 0.4f, 0.6f, 0.6f) }
        //};
        static Rect[][] LocateAreaData = new Rect[][] {
            new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f) }
        };
        //static bool LocateDebug = false;
        static bool LocateDebug = true;
        public Geometry2D.Vector<int> Size => ScreenLocate.Main.CameraSize;
        public ScreenMap Screen; // The identified screen, used for the perspective transform
        int capture = 0;
        int delay = 0;
        int maxCapture;
        int maxDelay;
        Geometry.Vector<float>[] ScreenBlackTexture;
        Geometry.Vector<float>[] ScreenWhiteTexture;
        int locateIndex = -1;
        List<Rect> locateArea = new List<Rect> {
            new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0.5f, 0.5f, 0.5f, 0.5f)
        }; // Screen regions that are lit white during locating
        float areaPercent => locateArea[locateIndex].size.x; // Fraction of the screen currently lit white
        int areaSelected = -1; // Which region was selected; the order matches Quadrilateral
        List<float> sumTemp = new List<float>();
        List<OrdinalQuadrilateral> quadTemp = new List<OrdinalQuadrilateral>();
        //public ScreenIdentification(WebCamTexture texture)
        //{
        //    Size = new Geometry2D.Vector<int>(texture.width, texture.height);
        //    Screen = new ScreenMap();
        //}
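        // Overview of the automatic locate flow (as driven by Update below):
        //   1. LocateScreen() turns the whole screen black; while locateIndex == -1 the
        //      baseline frames are accumulated into ScreenBlackTexture.
        //   2. locateArea initially holds the four screen quadrants; each is lit white in
        //      turn, captured into ScreenWhiteTexture, and scored by GetBrightness()
        //      (mean white-minus-black difference). The quadrant with the largest
        //      difference picks areaSelected, and LocateAreaData[areaSelected]
        //      (growing rectangles anchored at that corner) is appended to locateArea.
        //   3. Each appended rectangle is captured and passed to QuadrilateralFit(), which
        //      collects candidate quads in quadTemp; CaptureEnd() then either takes a
        //      single quad directly or extrapolates the vertices to the full screen.
        //   With LocateDebug enabled, steps 2-3 collapse to a single full-screen white pattern.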
        public static UnityEngine.Color FloatValueToColor(float i)
        {
            switch (i)
            {
                case 1:
                    return UnityEngine.Color.green;
                case 2:
                    return UnityEngine.Color.red;
                case 3:
                    return UnityEngine.Color.yellow;
                default:
                    return UnityEngine.Color.black;
            }
        }
        public ScreenIdentification()
        {
            Screen = new ScreenMap();
            //OnLocateScreenEnter += () => Debug.Log("OnLocateScreenEnter");
            //OnLocateScreenEnd += () => Debug.Log("OnLocateScreenEnd");
        }
        public void SetScreenQuad(QuadrilateralInCamera quad) => Screen.QuadInCamera = quad;
        public event Action OnLocateScreenEnter;
        public event Action OnLocateScreenEnd;
        public bool bStartLocateScreen { get; set; } = false; // Whether capturing is in progress
        // Entry point for automatic screen identification
        public void LocateScreen(int Capture = 45, int Delay = 45) // Both values are frame counts
        {
            if (ScreenLocate.Main.DebugScreenImage != null && ScreenLocate.Main.DebugOnZIMDemo) // This branch is only used for testing against a static image
            {
                ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(ScreenLocate.Main.DebugScreenImage.width, ScreenLocate.Main.DebugScreenImage.height);
                DebugImage(ScreenLocate.Main.DebugScreenImage);
                Screen.QuadInCamera = new QuadrilateralInCamera(quadTemp[0], new Vector(ScreenLocate.Main.DebugScreenImage.width, ScreenLocate.Main.DebugScreenImage.height));
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                delay = 0;
                capture = 0;
                ScreenWhiteTexture = null;
                ScreenBlackTexture = null;
                locateIndex = -1;
                areaSelected = -1;
                quadTemp.Clear();
                sumTemp.Clear();
                return;
            }
            delay = Math.Max(Delay, 5);
            capture = Math.Max(Capture, 5);
            maxDelay = Delay;
            maxCapture = Capture;
            ScreenLocate.SetScreen(new Rect(0f, 0f, 1f, 1f), UnityEngine.Color.black);
            //ScreenLocate.SetScreen(new Rect(0f, 0f, 0.6f, 0.6f), UnityEngine.Color.white);
            //bStartLocateScreen = false;
            OnLocateScreenEnter?.Invoke();
        }
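        // A minimal sketch of how a caller might drive this class; the real driver lives
        // elsewhere (ScreenLocate) and is not part of this file, so the names below are
        // illustrative only:
        //
        //   var identification = new ScreenIdentification();
        //   identification.OnLocateScreenEnd += () => Debug.Log("screen located");
        //   identification.LocateScreen(Capture: 45, Delay: 45); // counts are in frames
        //   // ...then once per rendered frame, with the current camera frame:
        //   // bool consumed = identification.Update(cameraTexture);
        //   // Update returns true while it is still delaying/capturing.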
        /// <summary>
        /// Whether the capture has started, i.e. both the capture and delay
        /// counters have been initialized.
        /// </summary>
        /// <returns></returns>
        public bool isInitLocateScreen()
        {
            return capture != 0 && delay != 0;
        }
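        /// <summary>
        /// Test-only path: runs QuadrilateralFit on a static debug image, pushes the
        /// intermediate textures to the debug outputs and, if a quad was found, shows it
        /// and builds a perspective-rectified copy of the red-channel texture (the
        /// DebugTexture call for that copy is currently commented out).
        /// </summary>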
        void DebugImage(Texture2D image)
        {
            QuadrilateralFit(out Texture2D LocateLightedRedTex, out Texture2D ChoosableLineTex, out Texture2D ScreenQuadTex, 5, image);
            ScreenLocate.DebugTexture(2, LocateLightedRedTex);
            ScreenLocate.DebugTexture(3, ScreenQuadTex);
            // Blend the identified segments with the original image
            ScreenLocate.DebugTexture(4, image.Merge(ScreenQuadTex));
            ScreenLocate.DebugTexture(5, ChoosableLineTex);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} quadTemp.Count:{quadTemp.Count}");
#endif
            if (quadTemp.Count > 0)
            {
                var quad = quadTemp[0];
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.outputRawImages[4].transform.GetChild(0) as RectTransform,
                    new QuadrilateralInCamera(quad, image.Size().o0Vector()));
                // Perspective transform
                var srcWidth = LocateLightedRedTex.width;
                var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
                var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
                var transformTex = new Texture2D(transformWidth, transformHeight);
                var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
                var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
                var srcPixel = LocateLightedRedTex.GetPixels();
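                // Inverse mapping: for every destination pixel (x, y) of the rectified
                // texture, ZIMPerspectiveTransform maps back into the source quad and the
                // rounded source pixel is copied. Parallel.For splits the work by column.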
                Parallel.For(0, transformWidth, (x) =>
                {
                    for (int y = 0; y < transformHeight; y++)
                    {
                        var index = y * transformWidth + x;
                        var sampleCoord = pt.TransformRound(x, y);
                        dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
                    }
                });
                transformTex.SetPixels(dstPixel);
                transformTex.Apply();
                //ScreenLocate.DebugTexture(1, transformTex);
#if (!NDEBUG && DEBUG && ENABLE_LOG)
                Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
#endif
            }
            //times.Add(watch.ElapsedMilliseconds);
            //UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
        }
        public void NextScreen()
        {
            // For testing only
            if (LocateDebug && areaSelected == -1)
            {
                LocateAreaData = new Rect[][] { new Rect[] { new Rect(0, 0, 1f, 1f) } };
                locateIndex = 3;
                areaSelected = 0;
                locateArea.AddRange(LocateAreaData[0]);
            }
            // locateIndex starts at -1
            locateIndex++;
            if (locateIndex < locateArea.Count) // Light up the screen regions one after another
            {
                ScreenLocate.SetScreen(locateArea[locateIndex], UnityEngine.Color.white);
                delay = maxDelay;
                capture = maxCapture;
            }
            else // Done: release the black/white control of the screen
            {
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                Reset();
            }
        }
        void Reset()
        {
            // bStartLocateScreen = false;
            delay = 0;
            capture = 0;
            ScreenWhiteTexture = null;
            ScreenBlackTexture = null;
            locateIndex = -1;
            areaSelected = -1;
            locateArea.RemoveRange(4, LocateAreaData[0].Length);
            quadTemp.Clear();
            sumTemp.Clear();
        }
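        // CaptureBlack/CaptureWhite accumulate each frame divided by maxCapture, so once
        // the capture countdown finishes the arrays hold the per-pixel average frame of
        // the black phase and of the current white phase respectively.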
        public void CaptureBlack(Texture2D cam)
        {
            if (ScreenBlackTexture == null)
                ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }
        public void CaptureWhite(Texture2D cam)
        {
            if (ScreenWhiteTexture == null)
                ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }
        public void CaptureStay(Texture2D cam)
        {
            if (locateIndex == -1) // The screen is fully black
            {
                CaptureBlack(cam);
            }
            else // Part of the screen is white
            {
                CaptureWhite(cam);
            }
        }
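        /// <summary>
        /// Called when a capture countdown reaches zero.
        /// For locateIndex 0..3 it scores the lit quadrant and, after the fourth one,
        /// picks the corner with the largest brightness difference.
        /// For later indices it fits a quadrilateral for each lit rectangle; on the last
        /// one it combines the collected quads into Screen.QuadInCamera (directly if a
        /// single quad was collected, otherwise via the linear fit below) and raises
        /// OnLocateScreenEnd.
        /// </summary>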
        public void CaptureEnd()
        {
            //Debug.Log("locateIndex: " + locateIndex + ", quad: " + quadTemp.Count);
            if (locateIndex == -1)
                return;
            if (locateIndex < 4)
            {
                sumTemp.Add(GetBrightness());
                ScreenWhiteTexture = null;
                // Select the region with the largest brightness difference
                if (locateIndex == 3)
                {
                    areaSelected = sumTemp.MaxIndex();
                    locateArea.AddRange(LocateAreaData[areaSelected]);
                }
            }
            else if (locateIndex >= 4 && locateIndex < locateArea.Count - 1)
            {
                QuadrilateralFit(out _, out _, out _);
                ScreenWhiteTexture = null;
            }
            else
            {
                QuadrilateralFit(out Texture2D LocateLightedRedTex, out Texture2D ChoosableLineTex, out Texture2D ScreenQuadTex);
                ScreenLocate.DebugTexture(2, LocateLightedRedTex);
                ScreenLocate.DebugTexture(3, ScreenQuadTex);
                // Blend the identified segments with the original image
                ScreenLocate.DebugTexture(4, LocateLightedRedTex.Merge(ScreenQuadTex));
                ScreenLocate.DebugTexture(5, ChoosableLineTex);
                if (quadTemp.Count != LocateAreaData[0].Length)
                {
                    Debug.Log($"<color=yellow>[ScreenIdentification] Quadrilateral fit failed, quadTemp.Count: {quadTemp.Count}</color>");
                }
                else if (quadTemp.Count == 1)
                {
                    Screen.QuadInCamera = new QuadrilateralInCamera(quadTemp[0], new Vector(Size.x, Size.y));
                    Debug.Log($"<color=aqua>[ScreenIdentification] Fit succeeded, Quad: {Screen.QuadInCamera.QuadString}____{Screen.QuadInCamera.SizeString}</color>");
                }
                else
                {
                    // Debug.Log($"Quadrilateral fit 2, quadTemp.Count: {quadTemp.Count}");
                    // Linear fit
                    var xValue = new List<float>() { 0 };
                    var predicts = new List<Vector>();
                    foreach (var i in LocateAreaData[0])
                        xValue.Add(i.size.x);
                    Vector baseVertex = Vector.Zero; // The vertex at x == 0
                    {
                        foreach (var q in quadTemp)
                        {
                            baseVertex += q[areaSelected];
                        }
                        baseVertex /= quadTemp.Count;
                    }
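                    // Linear fit per vertex: x is the lit-area fraction (0 plus the widths
                    // from LocateAreaData[0], e.g. 0, 0.3, 0.4, 0.5) and y is the vertex
                    // position observed at that fraction. The rectangles share the selected
                    // corner, so that vertex is just averaged (baseVertex) and also reused
                    // as the x == 0 sample for the other three vertices, which are then
                    // extrapolated with Predict(1) to where they would sit with the whole
                    // screen lit. rs averages the RSquared of the three fitted vertices.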
                    double rs = 0.0;
                    for (int i = 0; i < 4; i++)
                    {
                        if (i == areaSelected)
                        {
                            predicts.Add(baseVertex);
                        }
                        else
                        {
                            var yValue = new List<Vector>() { baseVertex };
                            foreach (var q in quadTemp)
                            {
                                yValue.Add(q[i]);
                            }
                            var lr = LinerRegression1D.Fit(2, xValue.ToArray(), yValue.ToArray());
                            rs += lr.RSquared / 3;
                            predicts.Add(lr.Predict<Vector>(1));
                        }
                    }
                    Screen.QuadInCamera = new QuadrilateralInCamera(predicts, new Vector(Size.x, Size.y));
                    Debug.Log($"<color=aqua>[ScreenIdentification] Fit succeeded, RSquared: {rs}, Quad: {Screen.QuadInCamera.QuadString}____{Screen.QuadInCamera.SizeString}</color>");
                    //if (rs < 0.8) Screen.Quad = null;
                }
                OnLocateScreenEnd?.Invoke();
            }
        }
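        /// <summary>
        /// Per-frame driver. While delay > 0 it only counts down (giving the displayed
        /// black/white pattern time to settle; the camera resolution is recorded on the
        /// last delay frame). While capture > 0 it accumulates the current frame and, on
        /// the last capture frame, runs CaptureEnd and advances to the next pattern.
        /// Returns true as long as the locate sequence is consuming frames.
        /// </summary>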
        public bool Update(Texture2D cam)
        {
            //if (!bStartLocateScreen) return false;
            if (delay != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                delay--;
                if (delay == 0)
                {
                    ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(cam.width, cam.height); // Record the current resolution
                    Debug.Log("[ScreenIdentification] Sampling texture, resolution: [" + Size.x + ", " + Size.y + "]");
                }
                return true;
            }
            if (capture != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                CaptureStay(cam);
                capture--;
                if (capture == 0)
                {
                    CaptureEnd();
                    NextScreen();
                }
                return true;
            }
            return false;
            #region Old
            /*
            if (delay != 0)
            {
            delay--;
            return true;
            }
            if (capture != 0)
            {
            capture--;
            if (ScreenBlackTexture == null)
            ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
            var ip = pixel[i];
            ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
            });
            if (capture == 0)
            ScreenLocate.SetScreen(UnityEngine.Color.black);
            return true;
            }
            if (delay != 0)
            {
            delay--;
            return true;
            }
            if (capture != 0)
            {
            capture--;
            if (ScreenWhiteTexture == null)
            ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
            var ip = pixel[i];
            ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
            });
            if (capture == 0)
            ScreenLocate.SetScreen(UnityEngine.Color.black);
            return true;
            }
            if (delay != 0)
            {
            delay--;
            return true;
            }
            if (capture != 0)
            {
            capture--;
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
            var ip = pixel[i];
            ScreenWhiteTexture[i] -= new Geometry.Vector<float>(ip.r, ip.g, ip.b);
            });
            if (capture == 0)
            {
            ScreenLocate.SetScreen(null);
            UnityEngine.Color[] newPixel = new UnityEngine.Color[Size.x * Size.y];
            Parallel.For(0, Size.x * Size.y, i => {
            var pi = ScreenWhiteTexture[i] /= capture;
            newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
            });
            //Read saved data
            //{
            // var fileName = "3.bin";
            // ScreenLocateTexture = $"2023 04 16 厦门测试数据/{fileName}".FileReadByte<Vector<float>[]>();
            // Debug.Log($"Read {fileName}");
            // Parallel.For(0, Size.x * Size.y, i =>
            // {
            // var pi = ScreenLocateTexture[i];
            // newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
            // });
            //}
            var ScreenLocateTex = new Texture2D(Size.x, Size.y);
            ScreenLocateTex.SetPixels(newPixel);
            ScreenLocateTex.Apply();
            //ScreenLocate.DebugTexture(2, ScreenLocateTex);
            var ScreenLocateTexLighted = ScreenLocateTex.AutoLight(10);
            //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
            //var FileSavePath = Application.persistentDataPath + "/ScreenLocateTexture.bin";
            bool Save = ScreenLocate.Main.SaveToggle.isOn;
            string time;
            if (Save)
            {
            time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
            var FileSavePath = $"屏幕定位数据{time}.bin";
            FileSavePath.FileWriteByte(ScreenWhiteTexture);
            var bytes = ScreenLocateTexLighted.EncodeToPNG();
            File.WriteAllBytes($"屏幕定位数据{time}.png", bytes);
            Debug.Log("ScreenLocateTexture Saved To: " + FileSavePath);
            }
            var ScreenLocateTexR = ScreenLocateTexLighted.ToRGB(ColorChannel.Red);
            var ScreenLocateTexG = ScreenLocateTexLighted.ToRGB(ColorChannel.Green);
            var ScreenLocateTexB = ScreenLocateTexLighted.ToRGB(ColorChannel.Blue);
            ScreenLocate.DebugTexture(2, ScreenLocateTexR);
            //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
            //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
            var watch = new System.Diagnostics.Stopwatch();
            watch.Start();
            var times = new List<double>() { 0.0 };
            var ScreenLocateTexLightedMat = ScreenLocateTexLighted.Too0Mat();
            //var ScreenLocateTexLightedMat = texture.Too0Mat();
            //var (edge, edgeDir) = ScreenLocateTexLightedMat.IdentifyEdge();
            var (edge, edgeDir) = ScreenLocateTexLightedMat.zimIdentifyEdgeGradientAny(15);
            //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradient().ToTex());
            //ScreenLocate.DebugTexture(4, edge.ToTex());
            var quadLines = ScreenLocateTexLightedMat.IdentifyQuadLSD(edge, edgeDir, out List<Line> lightLines, 30);
            var drawLineMap = new MatrixF2D(edge..Size.x, edge.Size.y);
            int lineCount = 0;
            foreach (var l in quadLines)
            {
            if (l != null)
            {
            o0Extension.DrawLine(drawLineMap.DrawLine(l, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
            lineCount++;
            }
            }
            if (lineCount == 4)
            {
            var a = quadLines[0].Intersect(quadLines[3], false).Value;
            var b = quadLines[0].Intersect(quadLines[1], false).Value;
            var c = quadLines[2].Intersect(quadLines[3], false).Value;
            var d = quadLines[1].Intersect(quadLines[2], false).Value;
            Quad = new Quadrilateral(a, b, c, d);
            if (!Quad.IsInScreen(ScreenLocate.Main.WebCamera.Size))
            Quad = null;
            }
            ScreenLocate.Main.ShowScreen(Quad);
            //var lines = edge.IdentifyLineLSD(edgeDir, 100);
            ////var lines = ScreenLocateTexLightedMat.IdentifyLineLSD();
            //var drawLineMap = new MatrixF2D(edge..Size.x, edge.Size.y);
            //var returnMaxLines = lines.Sub(0, 10);
            //foreach (var (line, sum, gradient) in returnMaxLines)
            // o0Extension.DrawLine(drawLineMap.DrawLine(line, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
            ScreenLocate.DebugTexture(3, drawLineMap.ToTex());
            //{
            // var bytes = drawLineMap.ToTex().EncodeToPNG();
            // File.WriteAllBytes($"屏幕定位数据DrawLineMap.png", bytes);
            //}
            times.Add(watch.ElapsedMilliseconds);
            UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
            //ScreenLocate.DebugTexture(5, edge.IdentifyLine(edgeDir).ToTex());
            //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientX().ToTex());
            //ScreenLocate.DebugTexture(5, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientY().ToTex());
            //var convolutionLighted2 = ScreenLocateTexLighted.Too0Mat().IdentifyEdgeVariance().ToTex();
            // OpenCV processing
            // zim
            {
            //var cvLines = edge.cvHoughLinesP();
            //ScreenLocate.DebugTexture(5, cvLines);
            //var myLines = Hough.Transform(edgeMat);
            //var cvLines = edge.cvLine(myLines);
            //ScreenLocate.DebugTexture(5, cvLines);
            }
            UnityEngine.Object.Destroy(ScreenLocateTex);
            //ScreenLocate.DebugTexture(4, convolutionLighted2);
            }
            return true;
            }
            /*
            var avg = new Geometry4D.Vector<float>();
            var pixel = texture.GetPixels();
            foreach(var i in pixel.Index())
            {
            var iP = pixel[i];
            avg += new Geometry4D.Vector<float>(iP.r, iP.g, iP.b, iP.a);
            }
            avg /= pixel.Count();
            /*
            var (texLightedR, texLightedG, texLightedB) = ToRGB(newTex);
            ScreenLocate.DebugTexture(3, texLightedR);
            ScreenLocate.DebugTexture(4, texLightedG);
            ScreenLocate.DebugTexture(5, texLightedB);
            //Debug.Log(avg);
            return false;
            /**/
            #endregion
        }
        float GetBrightness()
        {
            UnityEngine.Color[] differPixel = new UnityEngine.Color[Size.x * Size.y];
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
            });
            var sum = 0f;
            foreach (var i in differPixel)
            {
                sum += i.Brightness();
            }
            sum /= differPixel.Length;
            //Debug.Log(sum);
            return sum;
        }
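        /// <summary>
        /// Fits the screen quadrilateral from the white-minus-black difference image (or
        /// a supplied debug image): brighten it with AutoLight, run
        /// zimIdentifyEdgeGradientAny and ZIMIdentifyQuadLSD on the resulting matrix,
        /// fill in missing sides from oldLines, and intersect the four lines into a
        /// QuadrilateralInCamera. The result (or, failing that, the previous
        /// Screen.QuadInCamera) is appended to quadTemp. The out textures are the
        /// brightened red channel, the candidate-line overlay and the drawn screen quad.
        /// </summary>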
        void QuadrilateralFit(out Texture2D LocateLightedRedTex, out Texture2D ChoosableLineTex, out Texture2D ScreenQuadTex, float lineWidth = 10, Texture2D debugImage = null)
        {
            UnityEngine.Color[] differPixel = new UnityEngine.Color[Size.x * Size.y];
            // Read the data from the debug image, if one was supplied
            if (debugImage != null)
            {
                Debug.Log($"<color=aqua>Debug {debugImage.name}</color>");
                differPixel = debugImage.GetPixels();
            }
            else // Take the white-minus-black difference of the captured frames
            {
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                    differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                });
            }
            var ScreenLocateTex = new Texture2D(Size.x, Size.y);
            ScreenLocateTex.SetPixels(differPixel);
            ScreenLocateTex.Apply();
            //ScreenLocate.DebugTexture(2, ScreenLocateTex);
            var ScreenLocateTexLighted = ScreenLocateTex.AutoLight(10);
            //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
            var ScreenLocateTexR = ScreenLocateTexLighted.ToRGB(ColorChannel.Red);
            var ScreenLocateTexG = ScreenLocateTexLighted.ToRGB(ColorChannel.Green);
            var ScreenLocateTexB = ScreenLocateTexLighted.ToRGB(ColorChannel.Blue);
            LocateLightedRedTex = ScreenLocateTexR;
            //ScreenLocate.DebugTexture(2, ScreenLocateTexR);
            //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
            //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
            var ScreenLocateTexLightedMat = ScreenLocateTexLighted.Too0Mat();
            //var ScreenLocateTexLightedMat = texture.Too0Mat();
            //var (edge, edgeDir) = ScreenLocateTexLightedMat.IdentifyEdge();
            int conSize = (int)Math.Ceiling(0.007f * Size.y) * 2 + 1;
            conSize = Math.Max(conSize, 7); // Never smaller than 7
            float minLength = conSize * 7.7f;
            minLength = locateIndex == -1 ? minLength : minLength * areaPercent; // Scale minLength down with the lit-area fraction
            Debug.Log($"[ScreenIdentification] Size: ({Size.x},{Size.y}), convolution kernel size: {conSize}, minimum segment length: {minLength}");
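            // For example (just working the two formulas above): a 1280x720 capture gives
            // conSize = ceil(0.007 * 720) * 2 + 1 = 13 and minLength = 13 * 7.7 ≈ 100 px;
            // at 1920x1080 they become 17 and ≈ 131 px, further scaled by areaPercent
            // while only part of the screen is lit.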
            var (edge, edgeDir) = ScreenLocateTexLightedMat.zimIdentifyEdgeGradientAny(conSize);
            var quadLines = ScreenLocateTexLightedMat.ZIMIdentifyQuadLSD(
                edge, edgeDir,
                out Line[] oldLines, out List<Line> possibleLines, out List<(Line, float, float)> allLines,
                Screen, conSize, conSize, minLength);
            // Render allLines into an image
            var allLinesMap = new Matrix(ScreenLocateTexLightedMat.Size, Tiling: true);
            foreach (var l in allLines)
            {
                if (l.Item1 != null)
                    o0Extension.DrawLine(allLinesMap, l.Item1, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true);
            }
            var allLinesTex = allLinesMap.ToTexRGBA(FloatValueToColor);
            ScreenLocate.DebugTexture(1, allLinesTex);
            // Draw the identified edges and check whether they can be assembled into a screen; if so, set the ScreenMap
            // Segment order: bottom, right, top, left
            List<Line> LineIdentified = new List<Line>();
            for (int i = 0; i < 4; i++)
            {
                if (quadLines[i] != null)
                    LineIdentified.Add(quadLines[i]);
                else if (oldLines != null)
                    LineIdentified.Add(oldLines[i]);
            }
            var drawScreenMap = new Matrix(ScreenLocateTexLightedMat.Size, Tiling: true);
            foreach (var l in LineIdentified)
                o0Extension.DrawLine(drawScreenMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, lineWidth));
            ScreenQuadTex = drawScreenMap.ToTex(); // out ScreenQuadTex
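            // With the segment order bottom, right, top, left (see above), the corners are
            // recovered as a = bottom∩left, b = bottom∩right, c = top∩left, d = top∩right,
            // which appears to match the A..D corner order used when building the
            // OrdinalQuadrilateral in DebugImage earlier in this file.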
            QuadrilateralInCamera screenQuad = null;
            if (LineIdentified.Count == 4)
            {
                var a = LineIdentified[0].Intersect(LineIdentified[3], false).Value;
                var b = LineIdentified[0].Intersect(LineIdentified[1], false).Value;
                var c = LineIdentified[2].Intersect(LineIdentified[3], false).Value;
                var d = LineIdentified[1].Intersect(LineIdentified[2], false).Value;
                screenQuad = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
                if (!screenQuad.IsQuadComplete())
                    screenQuad = null;
            }
            if (screenQuad == null && Screen.QuadInCamera != null) // If possible, fall back to the previous screen
            {
                Debug.Log("<color=aqua>[ScreenIdentification] Identification failed this time, falling back to the previous result</color>");
                quadTemp.Add(Screen.QuadInCamera.Quad);
            }
            else if (screenQuad != null)
            {
                Debug.Log("<color=aqua>[ScreenIdentification] Quadrilateral identified</color>");
                quadTemp.Add(screenQuad.Quad);
            }
            // Also output an image of the identification result, including the distractor segments
            var LSDLineMap = new Matrix(ScreenLocateTexLightedMat.Size, Tiling: true);
            foreach (var l in possibleLines)
            {
                if (l != null && !quadLines.Contains(l))
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true); // Other candidate segments
            }
            foreach (var l in quadLines)
            {
                if (l != null)
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 2, new Geometry2D.Float.Vector(0, 4)); // Segments identified this time
            }
            if (oldLines != null)
            {
                foreach (var l in oldLines)
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true); // Old screen segments (e.g. from a previous manual identification)
            }
            ChoosableLineTex = LSDLineMap.ToTexRGBA(FloatValueToColor);
            // Optionally save the images to disk
            if (ScreenLocate.Main.SaveToggle.isOn && ScreenLocate.Main.DebugOnZIMDemo)
            {
                var FileDirectory = $"Debug_屏幕定位/";
                SaveImages(FileDirectory,
                    $"[ScreenLocate Auto] Size: ({Size.x},{Size.y}), convolution kernel size: {conSize}, minimum segment length: {minLength}",
                    ScreenLocateTex, allLinesTex, ScreenQuadTex);
            }
            //times.Add(watch.ElapsedMilliseconds);
            //UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
            // OpenCV processing, zim
            {
                //var cvLines = edge.cvHoughLinesP();
                //ScreenLocate.DebugTexture(5, cvLines);
                //var myLines = Hough.Transform(edgeMat);
                //var cvLines = edge.cvLine(myLines);
                //ScreenLocate.DebugTexture(5, cvLines);
            }
            UnityEngine.Object.Destroy(ScreenLocateTex);
        }
        void SaveImages(string FileDirectory, string log, Texture2D ScreenLocateTex, Texture2D allLinesTex, Texture2D ScreenQuadTex)
        {
            if (!Directory.Exists(FileDirectory))
                Directory.CreateDirectory(FileDirectory);
            var time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
            var pngData = (ScreenLocate.Main.outputTexture2D[7] as Texture2D)?.EncodeToPNG();
            if (pngData != null)
                File.WriteAllBytes($"{FileDirectory}{time}A屏幕原图.png", pngData);
            var pngData1 = ScreenLocateTex.EncodeToPNG();
            if (pngData1 != null)
                File.WriteAllBytes($"{FileDirectory}{time}B黑白色差.png", pngData1);
            var pngData2 = allLinesTex.EncodeToPNG();
            if (pngData2 != null)
                File.WriteAllBytes($"{FileDirectory}{time}C全部识别线段.png", pngData2);
            var pngData3 = ScreenQuadTex.EncodeToPNG();
            if (pngData3 != null)
                File.WriteAllBytes($"{FileDirectory}{time}D识别结果.png", pngData3);
            Debug.Log($"<color=aqua>({time}) Screen identification images saved to: application root/{FileDirectory}</color>");
            log += $"\r\nScreen original image saved: {pngData != null}, \r\nBlack/white difference saved: {pngData1 != null}, \r\nAll identified segments saved: {pngData2 != null}, \r\nIdentification result saved: {pngData3 != null}, ";
            File.WriteAllText($"{FileDirectory}{time}屏幕自动定位_日志.log", log);
        }
    }
}