ScreenIdentification.cs

#define ENABLE_LOG
using o0.Geometry2D.Float;
using o0.Num;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using UnityEngine;
using ZIM;
using ZIM.Unity;

namespace o0.Project
{
    public partial class ScreenIdentification
    {
        private const string TAG = "ScreenIdentification#";

        //static Rect[][] LocateAreaData = new Rect[][] {
        //    new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0.4f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0f, 0.4f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f), new Rect(0.4f, 0.4f, 0.6f, 0.6f) }
        //};
        static Rect[][] LocateAreaData = new Rect[][] {
            new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f) }
        };
        //static bool LocateDebug = false;
        static bool LocateDebug = true;

        public Geometry2D.Vector<int> Size => ScreenLocate.Main.CameraSize;
        public ScreenMap Screen; // the identified screen, used for the perspective transform

        int capture = 0;
        int delay = 0;
        int maxCapture;
        int maxDelay;
        Geometry.Vector<float>[] ScreenBlackTexture;
        Geometry.Vector<float>[] ScreenWhiteTexture;
        int locateIndex = -1;
        List<Rect> locateArea = new List<Rect> {
            new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0.5f, 0.5f, 0.5f, 0.5f)
        }; // regions of the screen that are lit white
        float areaPercent => locateArea[locateIndex].size.x; // fraction of the screen covered by the current white area
        int areaSelected = -1; // which corner region is selected; the order matches Quadrilateral
        List<float> sumTemp = new List<float>();
        List<OrdinalQuadrilateral> quadTemp = new List<OrdinalQuadrilateral>();

        //public ScreenIdentification(WebCamTexture texture)
        //{
        //    Size = new Geometry2D.Vector<int>(texture.width, texture.height);
        //    Screen = new ScreenMap();
        //}

        // The matrix values passed in are written as exact integer constants (1, 2, 3), so exact matching on a float is intentional here.
        public static UnityEngine.Color FloatValueToColor(float i)
        {
            switch (i)
            {
                case 1:
                    return UnityEngine.Color.green;
                case 2:
                    return UnityEngine.Color.red;
                case 3:
                    return UnityEngine.Color.yellow;
                default:
                    return UnityEngine.Color.black;
            }
        }

        public ScreenIdentification()
        {
            Screen = new ScreenMap();
            OnLocateScreenEnter += () => Application.targetFrameRate = 30; // lock the frame rate during identification so the camera captures the intended frames
            OnLocateScreenEnd += () => Application.targetFrameRate = 60;
        }

        public void SetScreenQuad(QuadrilateralInCamera quad) => Screen.QuadInCamera = quad;

        public event Action OnLocateScreenEnter;
        public event Action OnLocateScreenEnd;
        public bool bStartLocateScreen { get; set; } = false; // whether capturing is in progress

        // Entry point for automatic identification
        public void LocateScreen(int Capture = 30, int Delay = 30) // values are in frames
        {
            if (ScreenLocate.Main.DebugScreenImages.Count != 0 && ScreenLocate.Main.DebugOnZIMDemo) // this branch is only used for testing with still images
            {
                ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(ScreenLocate.Main.DebugScreenImages[0].width, ScreenLocate.Main.DebugScreenImages[0].height);
                DebugImage(ScreenLocate.Main.DebugScreenImages);
                Screen.QuadInCamera = new QuadrilateralInCamera(quadTemp[0], new Vector(ScreenLocate.Main.DebugScreenImages[0].width, ScreenLocate.Main.DebugScreenImages[0].height));
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                delay = 0;
                capture = 0;
                ScreenWhiteTexture = null;
                ScreenBlackTexture = null;
                locateIndex = -1;
                areaSelected = -1;
                quadTemp.Clear();
                sumTemp.Clear();
                ScreenLocate.Main.DebugScreenImages.Clear();
                return;
            }
            delay = Math.Max(Delay, 5);
            capture = Math.Max(Capture, 5);
            maxDelay = Delay;
            maxCapture = Capture;
            ScreenLocate.SetScreen(new Rect(0f, 0f, 1f, 1f), UnityEngine.Color.black);
            //ScreenLocate.SetScreen(new Rect(0f, 0f, 0.6f, 0.6f), UnityEngine.Color.white);
            //bStartLocateScreen = false;
            OnLocateScreenEnter?.Invoke();
        }
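
        // A minimal usage sketch (illustrative only; "driver" and "cameraTexture" are hypothetical names, not part of this class):
        //
        //     var identification = new ScreenIdentification();
        //     identification.LocateScreen();       // blacks out the screen and starts the delay/capture counters
        //     // ...then once per frame, with the current camera frame as a Texture2D:
        //     bool busy = identification.Update(cameraTexture);
        //     // Update returns false once identification is idle again.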
        /// <summary>
        /// Whether capturing has started,
        /// i.e. the two counters, capture and delay, have been initialized.
        /// </summary>
        /// <returns></returns>
        public bool isInitLocateScreen()
        {
            return capture != 0 && delay != 0;
        }

        void DebugImage(List<Texture2D> images)
        {
            QuadrilateralFit(images, 5);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} quadTemp.Count:{quadTemp.Count}");
#endif
            if (quadTemp.Count > 0)
            {
                var quad = quadTemp[0];
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.outputRawImages[4].transform.GetChild(0) as RectTransform,
                    new QuadrilateralInCamera(quad, images[0].Size().o0Vector()));
                // Perspective transform
                //var srcWidth = LocateLightedRedTex.width;
                //var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
                //var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
                //var transformTex = new Texture2D(transformWidth, transformHeight);
                //var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
                //var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
                //var srcPixel = LocateLightedRedTex.GetPixels();
                //Parallel.For(0, transformWidth, (x) =>
                //{
                //    for (int y = 0; y < transformHeight; y++)
                //    {
                //        var index = y * transformWidth + x;
                //        var sampleCoord = pt.TransformRound(x, y);
                //        dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
                //    }
                //});
                //transformTex.SetPixels(dstPixel);
                //transformTex.Apply();
                ////ScreenLocate.DebugTexture(1, transformTex);
                //#if (!NDEBUG && DEBUG && ENABLE_LOG)
                //Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
                //#endif
            }
            //times.Add(watch.ElapsedMilliseconds);
            //UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
        }

        public void NextScreen()
        {
            // For testing only
            if (LocateDebug && areaSelected == -1)
            {
                LocateAreaData = new Rect[][] { new Rect[] { new Rect(0, 0, 1f, 1f) } };
                locateIndex = 3;
                areaSelected = 0;
                locateArea.AddRange(LocateAreaData[0]);
            }
            // index starts from -1
            locateIndex++;
            if (locateIndex < locateArea.Count) // light up the screen areas one by one
            {
                ScreenLocate.SetScreen(locateArea[locateIndex], UnityEngine.Color.white);
                delay = maxDelay;
                capture = maxCapture;
            }
            else // release the black/white control of the screen
            {
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                Reset();
            }
        }

        void Reset()
        {
            //bStartLocateScreen = false;
            delay = 0;
            capture = 0;
            ScreenWhiteTexture = null;
            ScreenBlackTexture = null;
            locateIndex = -1;
            areaSelected = -1;
            locateArea.RemoveRange(4, LocateAreaData[0].Length);
            quadTemp.Clear();
            sumTemp.Clear();
        }

        public void CaptureBlack(Texture2D cam)
        {
            if (ScreenBlackTexture == null)
                ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }

        public void CaptureWhite(Texture2D cam)
        {
            if (ScreenWhiteTexture == null)
                ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }
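
        // Note on the accumulation above: each captured frame adds value / maxCapture, so after maxCapture frames
        // every entry holds the per-pixel mean over the capture window (with the default arguments, capture and
        // maxCapture are equal). For example, with maxCapture = 30 a channel that reads 0.6 every frame sums to
        // 30 * (0.6 / 30) = 0.6, i.e. its average value.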
        public void CaptureStay(Texture2D cam)
        {
            if (locateIndex == -1) // screen is black
            {
                CaptureBlack(cam);
            }
            else // part of the screen is white
            {
                CaptureWhite(cam);
            }
        }

        public void CaptureEnd()
        {
            //Debug.Log("locateIndex: " + locateIndex + ", quad: " + quadTemp.Count);
            if (locateIndex == -1)
                return;
            if (locateIndex < 4)
            {
                sumTemp.Add(GetBrightness());
                ScreenWhiteTexture = null;
                // pick the corner region with the largest brightness difference
                if (locateIndex == 3)
                {
                    areaSelected = sumTemp.MaxIndex();
                    locateArea.AddRange(LocateAreaData[areaSelected]);
                }
            }
            else if (locateIndex >= 4 && locateIndex < locateArea.Count - 1)
            {
                QuadrilateralFit();
                ScreenWhiteTexture = null;
            }
            else
            {
                QuadrilateralFit();
                if (quadTemp.Count != LocateAreaData[0].Length)
                {
                    Debug.Log($"<color=yellow>[ScreenIdentification] Quadrilateral fit failed, quadTemp.Count: {quadTemp.Count}</color>");
                }
                else if (quadTemp.Count == 1)
                {
                    Screen.QuadInCamera = new QuadrilateralInCamera(quadTemp[0], new Vector(Size.x, Size.y));
                    Debug.Log($"<color=aqua>[ScreenIdentification] Fit succeeded, Quad: {Screen.QuadInCamera.QuadString}____{Screen.QuadInCamera.SizeString}</color>");
                }
                else
                {
                    //Debug.Log($"Fitting quadrilateral 2, quadTemp.Count: {quadTemp.Count}");
                    // Linear fit: extrapolate each vertex to a full-screen white area (x == 1)
                    var xValue = new List<float>() { 0 };
                    var predicts = new List<Vector>();
                    foreach (var i in LocateAreaData[0])
                        xValue.Add(i.size.x);
                    Vector baseVertex = Vector.Zero; // the vertex at x == 0
                    {
                        foreach (var q in quadTemp)
                        {
                            baseVertex += q[areaSelected];
                        }
                        baseVertex /= quadTemp.Count;
                    }
                    double rs = 0.0;
                    for (int i = 0; i < 4; i++)
                    {
                        if (i == areaSelected)
                        {
                            predicts.Add(baseVertex);
                        }
                        else
                        {
                            var yValue = new List<Vector>() { baseVertex };
                            foreach (var q in quadTemp)
                            {
                                yValue.Add(q[i]);
                            }
                            var lr = LinerRegression1D.Fit(2, xValue.ToArray(), yValue.ToArray());
                            rs += lr.RSquared / 3;
                            predicts.Add(lr.Predict<Vector>(1));
                        }
                    }
                    Screen.QuadInCamera = new QuadrilateralInCamera(predicts, new Vector(Size.x, Size.y));
                    Debug.Log($"<color=aqua>[ScreenIdentification] Fit succeeded, RSquared: {rs}, Quad: {Screen.QuadInCamera.QuadString}____{Screen.QuadInCamera.SizeString}</color>");
                    //if (rs < 0.8) Screen.Quad = null;
                }
                OnLocateScreenEnd?.Invoke();
            }
        }
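
        // The multi-quad branch of CaptureEnd extrapolates each screen corner to a hypothetical full-screen white
        // area (x == 1) from the corners observed at the smaller white areas. Below is a minimal sketch of that idea
        // using plain least squares on one coordinate axis; LinerRegression1D is the project's own fitter and its
        // actual API may differ. This helper is illustrative only and is not called anywhere in the pipeline.
        static (float slope, float intercept) FitLineSketch(float[] x, float[] y)
        {
            // Ordinary least squares for y = slope * x + intercept.
            float meanX = x.Average(), meanY = y.Average();
            float num = 0f, den = 0f;
            for (int i = 0; i < x.Length; i++)
            {
                num += (x[i] - meanX) * (y[i] - meanY);
                den += (x[i] - meanX) * (x[i] - meanX);
            }
            var slope = num / den;
            return (slope, meanY - slope * meanX);
        }
        // Example: with x = { 0f, 0.3f, 0.4f, 0.5f } and the observed vertex x-coordinates as y, the predicted
        // full-screen coordinate is slope * 1 + intercept; the same fit is repeated for the y-coordinates.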
        public bool Update(Texture2D cam)
        {
            //if (!bStartLocateScreen) return false;
            if (delay != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                delay--;
                if (delay == 0)
                {
                    ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(cam.width, cam.height); // record the current resolution
                    Debug.Log("[ScreenIdentification] Sampling texture, resolution: [" + Size.x + ", " + Size.y + "]");
                }
                return true;
            }
            if (capture != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                CaptureStay(cam);
                capture--;
                if (capture == 0)
                {
                    CaptureEnd();
                    NextScreen();
                }
                return true;
            }
            return false;
            #region Old
            /*
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenBlackTexture == null)
                    ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenWhiteTexture == null)
                    ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] -= new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                {
                    ScreenLocate.SetScreen(null);
                    UnityEngine.Color[] newPixel = new UnityEngine.Color[Size.x * Size.y];
                    Parallel.For(0, Size.x * Size.y, i => {
                        var pi = ScreenWhiteTexture[i] /= capture;
                        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    });
                    // Read saved data
                    //{
                    //    var fileName = "3.bin";
                    //    ScreenLocateTexture = $"2023 04 16 厦门测试数据/{fileName}".FileReadByte<Vector<float>[]>();
                    //    Debug.Log($"Read {fileName}");
                    //    Parallel.For(0, Size.x * Size.y, i =>
                    //    {
                    //        var pi = ScreenLocateTexture[i];
                    //        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    //    });
                    //}
                    var ScreenLocateTex = new Texture2D(Size.x, Size.y);
                    ScreenLocateTex.SetPixels(newPixel);
                    ScreenLocateTex.Apply();
                    //ScreenLocate.DebugTexture(2, ScreenLocateTex);
                    var ScreenLocateTexLighted = ScreenLocateTex.AutoLight(10);
                    //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
                    //var FileSavePath = Application.persistentDataPath + "/ScreenLocateTexture.bin";
                    bool Save = ScreenLocate.Main.SaveToggle.isOn;
                    string time;
                    if (Save)
                    {
                        time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
                        var FileSavePath = $"屏幕定位数据{time}.bin";
                        FileSavePath.FileWriteByte(ScreenWhiteTexture);
                        var bytes = ScreenLocateTexLighted.EncodeToPNG();
                        File.WriteAllBytes($"屏幕定位数据{time}.png", bytes);
                        Debug.Log("ScreenLocateTexture Saved To: " + FileSavePath);
                    }
                    var ScreenLocateTexR = ScreenLocateTexLighted.ToRGB(ColorChannel.Red);
                    var ScreenLocateTexG = ScreenLocateTexLighted.ToRGB(ColorChannel.Green);
                    var ScreenLocateTexB = ScreenLocateTexLighted.ToRGB(ColorChannel.Blue);
                    ScreenLocate.DebugTexture(2, ScreenLocateTexR);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
                    var watch = new System.Diagnostics.Stopwatch();
                    watch.Start();
                    var times = new List<double>() { 0.0 };
                    var ScreenLocateTexLightedMat = ScreenLocateTexLighted.Too0Mat();
                    //var ScreenLocateTexLightedMat = texture.Too0Mat();
                    //var (edge, edgeDir) = ScreenLocateTexLightedMat.IdentifyEdge();
                    var (edge, edgeDir) = ScreenLocateTexLightedMat.zimIdentifyEdgeGradientAny(15);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradient().ToTex());
                    //ScreenLocate.DebugTexture(4, edge.ToTex());
                    var quadLines = ScreenLocateTexLightedMat.IdentifyQuadLSD(edge, edgeDir, out List<Line> lightLines, 30);
                    var drawLineMap = new MatrixF2D(edge.Size.x, edge.Size.y);
                    int lineCount = 0;
                    foreach (var l in quadLines)
                    {
                        if (l != null)
                        {
                            o0Extension.DrawLine(drawLineMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                            lineCount++;
                        }
                    }
                    if (lineCount == 4)
                    {
                        var a = quadLines[0].Intersect(quadLines[3], false).Value;
                        var b = quadLines[0].Intersect(quadLines[1], false).Value;
                        var c = quadLines[2].Intersect(quadLines[3], false).Value;
                        var d = quadLines[1].Intersect(quadLines[2], false).Value;
                        Quad = new Quadrilateral(a, b, c, d);
                        if (!Quad.IsInScreen(ScreenLocate.Main.WebCamera.Size))
                            Quad = null;
                    }
                    ScreenLocate.Main.ShowScreen(Quad);
                    //var lines = edge.IdentifyLineLSD(edgeDir, 100);
                    ////var lines = ScreenLocateTexLightedMat.IdentifyLineLSD();
                    //var drawLineMap = new MatrixF2D(edge.Size.x, edge.Size.y);
                    //var returnMaxLines = lines.Sub(0, 10);
                    //foreach (var (line, sum, gradient) in returnMaxLines)
                    //    o0Extension.DrawLine(drawLineMap, line, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                    ScreenLocate.DebugTexture(3, drawLineMap.ToTex());
                    //{
                    //    var bytes = drawLineMap.ToTex().EncodeToPNG();
                    //    File.WriteAllBytes($"屏幕定位数据DrawLineMap.png", bytes);
                    //}
                    times.Add(watch.ElapsedMilliseconds);
                    UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
                    //ScreenLocate.DebugTexture(5, edge.IdentifyLine(edgeDir).ToTex());
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientX().ToTex());
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientY().ToTex());
                    //var convolutionLighted2 = ScreenLocateTexLighted.Too0Mat().IdentifyEdgeVariance().ToTex();
                    // OpenCV processing
                    // zim
                    {
                        //var cvLines = edge.cvHoughLinesP();
                        //ScreenLocate.DebugTexture(5, cvLines);
                        //var myLines = Hough.Transform(edgeMat);
                        //var cvLines = edge.cvLine(myLines);
                        //ScreenLocate.DebugTexture(5, cvLines);
                    }
                    UnityEngine.Object.Destroy(ScreenLocateTex);
                    //ScreenLocate.DebugTexture(4, convolutionLighted2);
                }
                return true;
            }
            /*
            var avg = new Geometry4D.Vector<float>();
            var pixel = texture.GetPixels();
            foreach (var i in pixel.Index())
            {
                var iP = pixel[i];
                avg += new Geometry4D.Vector<float>(iP.r, iP.g, iP.b, iP.a);
            }
            avg /= pixel.Count();
            /*
            var (texLightedR, texLightedG, texLightedB) = ToRGB(newTex);
            ScreenLocate.DebugTexture(3, texLightedR);
            ScreenLocate.DebugTexture(4, texLightedG);
            ScreenLocate.DebugTexture(5, texLightedB);
            //Debug.Log(avg);
            return false;
            /**/
            #endregion
        }
        float GetBrightness()
        {
            UnityEngine.Color[] differPixel = new UnityEngine.Color[Size.x * Size.y];
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
            });
            var sum = 0f;
            foreach (var i in differPixel)
            {
                sum += i.Brightness();
            }
            sum /= differPixel.Length;
            //Debug.Log(sum);
            return sum;
        }

        // Convert pixels into the texture image required for screen locating
        Texture2D ToLocateTex(UnityEngine.Color[] pixels)
        {
            var ScreenLocateTex = new Texture2D(Size.x, Size.y);
            ScreenLocateTex.SetPixels(pixels);
            ScreenLocateTex.Apply();
            //ScreenLocate.DebugTexture(2, ScreenLocateTex);
            return ScreenLocateTex.AutoLight(10);
            //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
            //var ScreenLocateTexR = ToLocateTex.ToRGB(ColorChannel.Red);
            //var ScreenLocateTexG = ToLocateTex.ToRGB(ColorChannel.Green);
            //var ScreenLocateTexB = ToLocateTex.ToRGB(ColorChannel.Blue);
            //LocateLightedRedTex = ScreenLocateTexR;
            //ScreenLocate.DebugTexture(2, ScreenLocateTexR);
            //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
            //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
            //var ScreenLocateTexLightedMat = texture.Too0Mat();
        }
        /// <param name="lineWidth">Width used when drawing the identified screen edges</param>
        /// <param name="debugImages">If this is not null and not empty, debug processing is run on these images</param>
        void QuadrilateralFit(List<Texture2D> debugImages = null, float lineWidth = 10)
        {
            // Black/white screen differences; holds multiple batches of images used for identification. The list must not be empty.
            List<UnityEngine.Color[]> PixelsMultipleBatches = new List<UnityEngine.Color[]>();
            // Load debug data
            if (debugImages != null && debugImages.Count != 0)
            {
                foreach (var i in debugImages)
                {
                    Debug.Log($"<color=aqua>Debug {i.name}</color>");
                    PixelsMultipleBatches.Add(i.GetPixels());
                }
            }
            else // compute the screen difference
            {
                var maxWhite = 0f;
                foreach (var i in ScreenWhiteTexture)
                {
                    var m = i.x > i.y ? (i.x > i.z ? i.x : i.z) : (i.y > i.z ? i.y : i.z);
                    if (maxWhite < m)
                        maxWhite = m;
                }
                var scale = 1.0f / maxWhite; // stretch the contrast
                var differPixel = new UnityEngine.Color[Size.x * Size.y];
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                    differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z) * scale;
                });
                PixelsMultipleBatches.Add(differPixel);
                PixelsMultipleBatches.Add(ScreenWhiteTexture.Select((i) => new UnityEngine.Color(i.x, i.y, i.z) * scale).ToArray());
            }
            int conSize = (int)Math.Ceiling(0.007f * Size.y) * 2 + 1;
            conSize = Math.Max(conSize, 7); // minimum of 7
            float minLength = conSize * 7.7f;
            minLength = locateIndex == -1 ? minLength : minLength * areaPercent; // minLength shrinks with the white-area fraction
            string log = $"[ScreenLocate Auto] Size: ({Size.x},{Size.y}), convolution kernel size: {conSize}, minimum line length: {minLength}";
            var allLines = new List<LineIdentified>();
            List<Texture2D> LocateTexTemp = new List<Texture2D>();
            List<Matrix> ScreenLocateMatList = new List<Matrix>();
            foreach (var batch in PixelsMultipleBatches.Index())
            {
                var locateTex = ToLocateTex(PixelsMultipleBatches[batch]);
                LocateTexTemp.Add(locateTex);
                var ScreenLocateMat = locateTex.Too0Mat(); // the Matrix the lines are extracted from
                var lineCount = ZIMIdentifyQuadLSD(ref allLines, batch, ScreenLocateMat.zimIdentifyEdgeGradientAny(conSize), minLength);
                log += $"\r\nImage {batch}: number of identified line segments: {lineCount}";
                ScreenLocateMatList.Add(ScreenLocateMat);
            }
            Texture2D ScreenLocateTexture = LocateTexTemp[0]; // for output
            // Filter down to the four edges of the quadrilateral; ScreenLocateMatList[0] is, by convention, the black/white screen difference
            var quadLines = FilterLines(ScreenLocateMatList, allLines, GetAvgPoint(ScreenLocateMatList[0]),
                out Line[] oldLines, out List<Line> possibleLines,
                Screen, conSize, conSize, minLength);
            // Render allLines into an image
            var allLinesMap = new Matrix(Size, Tiling: true);
            foreach (var l in allLines)
            {
                if (l.Line != null)
                    o0Extension.DrawLine(allLinesMap, l.Line, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true);
            }
            var allLinesTex = allLinesMap.ToTexRGBA(FloatValueToColor);
            ScreenLocate.DebugTexture(1, allLinesTex);
            // Draw the identified edges and check whether they can be assembled into a screen; if so, set the ScreenMap.
            // Edge order: bottom, right, top, left
            List<Line> LineIdentified = new List<Line>();
            for (int i = 0; i < 4; i++)
            {
                if (quadLines[i] != null)
                    LineIdentified.Add(quadLines[i]);
                else if (oldLines != null)
                    LineIdentified.Add(oldLines[i]);
            }
            var drawScreenMap = new Matrix(Size, Tiling: true);
            foreach (var l in LineIdentified)
                o0Extension.DrawLine(drawScreenMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, lineWidth));
            Texture2D ScreenQuadTex = drawScreenMap.ToTex(); // out ScreenQuadTex
            QuadrilateralInCamera screenQuad = null;
            if (LineIdentified.Count == 4)
            {
                var a = LineIdentified[0].Intersect(LineIdentified[3], false).Value;
                var b = LineIdentified[0].Intersect(LineIdentified[1], false).Value;
                var c = LineIdentified[2].Intersect(LineIdentified[3], false).Value;
                var d = LineIdentified[1].Intersect(LineIdentified[2], false).Value;
                screenQuad = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
                if (!screenQuad.IsQuadComplete())
                    screenQuad = null;
            }
            if (screenQuad == null && Screen.QuadInCamera != null) // fall back to the previous screen if possible
            {
                Debug.Log("<color=aqua>[ScreenIdentification] This round of identification failed; falling back to the previous result</color>");
                quadTemp.Add(Screen.QuadInCamera.Quad);
            }
            else if (screenQuad != null)
            {
                Debug.Log("<color=aqua>[ScreenIdentification] Quadrilateral identified</color>");
                quadTemp.Add(screenQuad.Quad);
            }
            // Also output a result image that includes the distracting line segments
            var LSDLineMap = new Matrix(Size, Tiling: true);
            foreach (var l in possibleLines)
            {
                if (l != null && !quadLines.Contains(l))
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true); // other candidate segments
            }
            foreach (var l in quadLines)
            {
                if (l != null)
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 2, new Geometry2D.Float.Vector(0, 4)); // segments identified this round
            }
            if (oldLines != null)
            {
                foreach (var l in oldLines)
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true); // old screen edges (e.g. from a previous manual identification)
            }
            Texture2D ChoosableLineTex = LSDLineMap.ToTexRGBA(FloatValueToColor);
            Debug.Log(log);
            // Whether to save the images locally
            if (ScreenLocate.Main.SaveToggle.isOn && ScreenLocate.Main.DebugOnZIMDemo)
            {
                var FileDirectory = $"Debug_屏幕定位/";
                SaveImages(FileDirectory, log, ScreenLocateTexture, allLinesTex, ChoosableLineTex, ScreenQuadTex);
            }
            //times.Add(watch.ElapsedMilliseconds);
            //UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
            // OpenCV processing, zim
            {
                //var cvLines = edge.cvHoughLinesP();
                //ScreenLocate.DebugTexture(5, cvLines);
                //var myLines = Hough.Transform(edgeMat);
                //var cvLines = edge.cvLine(myLines);
                //ScreenLocate.DebugTexture(5, cvLines);
            }
            {
                ScreenLocate.DebugTexture(2, ScreenLocateTexture);
                ScreenLocate.DebugTexture(3, ScreenQuadTex);
                // Blend the line segments with the original image
                ScreenLocate.DebugTexture(4, ScreenLocateTexture.Merge(ScreenQuadTex));
                ScreenLocate.DebugTexture(5, ChoosableLineTex);
            }
            foreach (var i in LocateTexTemp)
            {
                if (i != ScreenLocateTexture) // ScreenLocateTexture is released by ScreenLocate.DebugTexture
                    GameObject.Destroy(i);
            }
        }
        Vector GetAvgPoint(Matrix screenLocateMat)
        {
            // Weighted average
            Vector[] avgPointsColumn = new Vector[screenLocateMat.Size.x];
            float[] valueSumsColumn = new float[screenLocateMat.Size.x];
            Parallel.For(0, screenLocateMat.Size.x, i =>
            {
                for (int j = 0; j < screenLocateMat.Size.y; j++)
                {
                    var value = screenLocateMat[i, j];
                    valueSumsColumn[i] += value;
                    avgPointsColumn[i] += new Vector(i, j) * value;
                }
            });
            Vector avgPoint = Vector.Zero;
            var valueSum = 0f;
            for (int i = 0; i < screenLocateMat.Size.x; i++)
            {
                avgPoint += avgPointsColumn[i];
                valueSum += valueSumsColumn[i];
            }
            avgPoint /= valueSum;
            return avgPoint;
        }
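
        // Note: GetAvgPoint is an intensity-weighted centroid, avg = sum(p_i * v_i) / sum(v_i), where p_i is a pixel
        // coordinate and v_i its matrix value. E.g. values 1 at (0,0) and 3 at (2,0) give ((0*1 + 2*3) / 4, 0) = (1.5, 0),
        // so the point is pulled toward the brighter region (here, the lit screen area).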
        // Returns the number of line segments found; 0 means the search failed
        int ZIMIdentifyQuadLSD(ref List<LineIdentified> allLines, int batch, (Matrix edgeMat, Matrix edgeDirMat) edgeGradient,
            float minLength = 100)
        {
            var l = edgeGradient.edgeMat.IdentifyLineLSD(edgeGradient.edgeDirMat, minLength, 50, LineCaptureSize: new Vector(10, 6));
            if (l == null || l.Count == 0)
                return 0;
            allLines.AddRange(l.Select((i) => new LineIdentified(batch, i)));
            return l.Count;
        }

        // Returns the four edges of the quadrilateral. The list length is always 4 (an entry is null if that edge was not identified),
        // in the order: bottom, right, top, left.
        List<Line> FilterLines(List<Matrix> screenLocateMatList, List<LineIdentified> allLines, Vector avgPoint,
            out Line[] oldLines, out List<Line> possibleLines,
            ScreenMap screen, float conSize, float gradientLength, float minLength = 100)
        {
            //Debug.Log("[IdentifyLineLSD] lines.Count: " + lines.Count);
            // The matrix produced by LSD is smaller (because of the convolution), so the lines must be shifted back here
            var offset = new Vector((conSize - 1) / 2, (conSize - 1) / 2);
            for (int i = 0; i < allLines.Count; i++)
                allLines[i].Offset(offset);
            // Combined gradient along the line (gradient times a length factor times a distance factor);
            // distanceRatio is the actual distance divided by the maximum allowed distance
            float estimateGradient(LineIdentified line, float distanceRatio)
            {
                var dir = (line.Line.B - line.Line.A).Normalized;
                var vertical = new Vector(-dir.y, dir.x) * (gradientLength / 2);
                var step = 2;
                var ll = line.Line.Length;
                var lg = new List<float>();
                for (int i = 0; i <= ll; i += step)
                {
                    var point = line.Line.A + dir * i;
                    var ga = point + vertical;
                    var gb = point - vertical;
                    lg.Add(screenLocateMatList[line.Batch][(int)ga.x, (int)ga.y] - screenLocateMatList[line.Batch][(int)gb.x, (int)gb.y]);
                }
                float e = (float)Math.Pow(Math.Ceiling(line.Line.Length / minLength), 0.2f); // length factor: segments with larger gradient and greater length are preferred when filtering
                float d = (3 - distanceRatio) / 2; // distance factor: the closer the segment, the larger the factor
                return e * d * Math.Abs(lg.Mean());
            }
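            // Scoring example: a candidate at half the allowed distance (distanceRatio = 0.5) gets d = (3 - 0.5) / 2 = 1.25,
            // and a segment about four times minLength gets e = 4^0.2 ≈ 1.32, so nearby, long, high-contrast segments
            // outrank short or distant ones with the same raw gradient.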
            // bottom, right, top, left
            var quadLines = new List<(float, Line)>[4] { new List<(float, Line)>(), new List<(float, Line)>(), new List<(float, Line)>(), new List<(float, Line)>() };
            possibleLines = new List<Line>();
            oldLines = null;
            // If location data already exists, filter the candidate lines against it
            if (screen.QuadInCamera != null)
            {
                Debug.Log("[IdentifyLineLSD] Filtering against existing location data");
                screen.RefreshCameraSize(new Vector2(Size.x, Size.y));
                var calibration = ScreenLocate.Main.ReDoLocateCalibrationRatio * Size.y;
                oldLines = screen.QuadInCamera.GetLines();
                var pedals = oldLines.Select((i) => o0Extension.PointPedal(i, avgPoint, out _)).ToArray(); // perpendicular feet of the current location, in the order bottom, right, top, left
                foreach (var i in allLines)
                {
                    float minDistance = float.MaxValue;
                    int index = -1;
                    foreach (var j in pedals.Index())
                    {
                        var d = (o0Extension.PointPedal(i.Line, avgPoint, out _) - pedals[j]).Length;
                        if (d < minDistance)
                        {
                            minDistance = d;
                            index = j;
                        }
                    }
                    //Debug.Log(minDistance + ", -----------" + calibration);
                    if (minDistance < calibration) // the perpendicular feet are close enough
                    {
                        // Additionally, the foot of the perpendicular from the new segment's midpoint onto the old edge must lie within the old segment
                        var middleToOldLine = o0Extension.PointPedal(oldLines[index], (i.Line.A + i.Line.B) / 2, out bool inLineSegment);
                        if (inLineSegment)
                        {
                            quadLines[index].Add((estimateGradient(i, minDistance / calibration), i.Line));
                            possibleLines.Add(i.Line);
                        }
                    }
                }
            }
            else
            {
                var avaAngleHalf = 75f;
                foreach (var line in allLines)
                {
                    possibleLines.Add(line.Line);
                    var a = (avgPoint - (line.Line.A + line.Line.B) / 2).DegreeToXAxis();
                    //Debug.Log(a + ", " + gradient + ", " + sum);
                    int index = -1;
                    if (Math.Abs(a - line.GradientDegree) < avaAngleHalf || Math.Abs(a - 360 - line.GradientDegree) < avaAngleHalf || Math.Abs(a + 360 - line.GradientDegree) < avaAngleHalf)
                    {
                        if (line.GradientDegree > 45 && line.GradientDegree < 135) // bottom
                            index = 0;
                        else if (line.GradientDegree > 135 && line.GradientDegree < 225) // right
                            index = 1;
                        else if (line.GradientDegree > 225 && line.GradientDegree < 315) // top
                            index = 2;
                        else
                            index = 3;
                        //var g = Math.Abs(lg.Mean());
                        //Debug.Log(gradient + ", " + g);
                        //List<float> lp1 = new List<float>(), lp2 = new List<float>(); // values on the two sides of the line
                        //for (float i = 0; i <= ll; i += step)
                        //{
                        //    var point = line.A + dir * i;
                        //    var ga = point + vertical;
                        //    var gb = point - vertical;
                        //    lp1.Add(screenLocateMat[(int)ga.x, (int)ga.y]);
                        //    lp2.Add(screenLocateMat[(int)gb.x, (int)gb.y]);
                        //}
                        //var avg1 = lp1.Mean();
                        //var avg2 = lp2.Mean();
                        //var v1 = lp1.Variance();
                        //var v2 = lp2.Variance();
                        //var lineGradient = Math.Abs(avg1 - avg2) / (v1 + v2 + 0.2f); // the smaller the variance, the more valuable the gradient
                        ////var g = Math.Abs(lg.Mean());
                        ////Debug.Log(gradient + ", " + g);
                        //Debug.Log(v1 + ", " + v2 + ", " + lineGradient);
                        //quadLines[index].Add((lineGradient, line));
                        quadLines[index].Add((estimateGradient(line, 1), line.Line));
                    }
                }
            }
            var result = new Line[4];
            for (int i = 0; i < 4; i++)
            {
                if (quadLines[i].Count > 0)
                    result[i] = quadLines[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
            }
            return result.ToList();
        }
        void SaveImages(string FileDirectory, string log, Texture2D ScreenLocateTex, Texture2D allLinesTex, Texture2D ChoosableLineTex, Texture2D ScreenQuadTex)
        {
            if (!Directory.Exists(FileDirectory))
                Directory.CreateDirectory(FileDirectory);
            var time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
            var pngData = (ScreenLocate.Main.outputTexture2D[7] as Texture2D)?.EncodeToPNG();
            if (pngData != null)
                File.WriteAllBytes($"{FileDirectory}{time}A屏幕原图.png", pngData);
            var pngData1 = ScreenLocateTex.EncodeToPNG();
            if (pngData1 != null)
                File.WriteAllBytes($"{FileDirectory}{time}B黑白色差.png", pngData1);
            var pngData2 = allLinesTex.EncodeToPNG();
            if (pngData2 != null)
                File.WriteAllBytes($"{FileDirectory}{time}C全部识别线段.png", pngData2);
            var pngData3 = ChoosableLineTex.EncodeToPNG();
            if (pngData3 != null)
                File.WriteAllBytes($"{FileDirectory}{time}D备选线段.png", pngData3);
            var pngData4 = ScreenQuadTex.EncodeToPNG();
            if (pngData4 != null)
                File.WriteAllBytes($"{FileDirectory}{time}E识别结果.png", pngData4);
            Debug.Log($"<color=aqua>({time}) Screen identification images saved to: application root directory/{FileDirectory}</color>");
            log +=
                $"\r\nOriginal screen image saved: {pngData != null}, " +
                $"\r\nBlack/white difference saved: {pngData1 != null}, " +
                $"\r\nAll identified segments saved: {pngData2 != null}, " +
                $"\r\nCandidate segments saved: {pngData3 != null}, " +
                $"\r\nIdentification result saved: {pngData4 != null}, ";
            File.WriteAllText($"{FileDirectory}{time}屏幕自动定位_日志.log", log);
        }
    }
}