// ScreenIdentification.cs
#define ENABLE_LOG
using o0.Geometry2D.Float;
using o0.Num;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using UnityEngine;
using ZIM;
using ZIM.Unity;

namespace o0.Project
{
    public partial class ScreenIdentification
    {
        private const string TAG = "ScreenIdentification#";

        //static Rect[][] LocateAreaData = new Rect[][] {
        //    new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0.4f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0f, 0.4f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f), new Rect(0.4f, 0.4f, 0.6f, 0.6f) }
        //};
        // Per-corner sequences of white areas at increasing scale; the row order matches the quadrant order
        // used in locateArea and the Quadrilateral vertices.
        static Rect[][] LocateAreaData = new Rect[][] {
            new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f) }
        };
        //static bool LocateDebug = false;
        static bool LocateDebug = true;

        public Geometry2D.Vector<int> Size => ScreenLocate.Main.CameraSize;
        public ScreenMap Screen; // The identified screen, used to perform the perspective transform
        int capture = 0;
        int delay = 0;
        int maxCapture;
        int maxDelay;
        Geometry.Vector<float>[] ScreenBlackTexture;
        Geometry.Vector<float>[] ScreenWhiteTexture;
        int locateIndex = -1;
        List<Rect> locateArea = new List<Rect> {
            new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0.5f, 0.5f, 0.5f, 0.5f)
        }; // Areas of the screen that are lit white
        float areaPercent => locateArea[locateIndex].size.x; // Proportion of the current white area
        int areaSelected = -1; // Which area is selected; the order matches the Quadrilateral
        List<float> sumTemp = new List<float>();
        List<OrdinalQuadrilateral> quadTemp = new List<OrdinalQuadrilateral>();

        //public ScreenIdentification(WebCamTexture texture)
        //{
        //    Size = new Geometry2D.Vector<int>(texture.width, texture.height);
        //    Screen = new ScreenMap();
        //}
        // Maps matrix cell values to colors for the debug textures (used with ToTexRGBA below).
        public static UnityEngine.Color FloatValueToColor(float i)
        {
            switch (i)
            {
                case 1:
                    return UnityEngine.Color.green;
                case 2:
                    return UnityEngine.Color.red;
                case 3:
                    return UnityEngine.Color.yellow;
                default:
                    return UnityEngine.Color.black;
            }
        }
        public ScreenIdentification()
        {
            Screen = new ScreenMap();
            OnLocateScreenEnter += () => Application.targetFrameRate = 30; // Pin the frame rate during identification so the camera captures the intended frames
            OnLocateScreenEnd += () => Application.targetFrameRate = 60;
        }
        public void SetScreenQuad(QuadrilateralInCamera quad) => Screen.QuadInCamera = quad;
        public event Action OnLocateScreenEnter;
        public event Action OnLocateScreenEnd;
        public bool bStartLocateScreen { get; set; } = false; // Whether capturing should run
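        // Overview of the automatic locating flow implemented below (a summary of this file, not extra behavior):
        //   1. LocateScreen() turns the whole screen black and arms the delay/capture counters.
        //   2. Update() is driven once per camera frame: it first burns `delay` frames so the physical screen
        //      and camera have settled, then accumulates `capture` frames via CaptureStay().
        //   3. When a capture run ends, CaptureEnd() evaluates it and NextScreen() lights the next white area
        //      from locateArea: the first four entries are the quarter-screen probes used to pick the best
        //      corner, and the appended LocateAreaData entries are the growing areas used for fitting.
        //   4. The final fit (quadTemp plus the linear regression in CaptureEnd) produces Screen.QuadInCamera,
        //      after which OnLocateScreenEnd is raised.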
        // Entry point of automatic identification
        public void LocateScreen(int Capture = 30, int Delay = 30) // values are frame counts
        {
            if (ScreenLocate.Main.DebugScreenImages.Count != 0 && ScreenLocate.Main.DebugOnZIMDemo) // This branch is only used for testing with still images
            {
                ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(ScreenLocate.Main.DebugScreenImages[0].width, ScreenLocate.Main.DebugScreenImages[0].height);
                DebugImage(ScreenLocate.Main.DebugScreenImages);
                Screen.QuadInCamera = new QuadrilateralInCamera(quadTemp[0], new Vector(ScreenLocate.Main.DebugScreenImages[0].width, ScreenLocate.Main.DebugScreenImages[0].height));
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                delay = 0;
                capture = 0;
                ScreenWhiteTexture = null;
                ScreenBlackTexture = null;
                locateIndex = -1;
                areaSelected = -1;
                quadTemp.Clear();
                sumTemp.Clear();
                ScreenLocate.Main.DebugScreenImages.Clear();
                return;
            }
            delay = Math.Max(Delay, 5);
            capture = Math.Max(Capture, 5);
            maxDelay = Delay;
            maxCapture = Capture;
            ScreenLocate.SetScreen(new Rect(0f, 0f, 1f, 1f), UnityEngine.Color.black);
            //ScreenLocate.SetScreen(new Rect(0f, 0f, 0.6f, 0.6f), UnityEngine.Color.white);
            //bStartLocateScreen = false;
            OnLocateScreenEnter?.Invoke();
        }
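        // Typical driving pattern (an illustrative sketch only; the per-frame caller and the
        // `identification` / `camTexture` names are assumptions, not part of this class):
        //
        //   identification.LocateScreen(Capture: 30, Delay: 30);
        //   ...
        //   void OnCameraFrame(Texture2D camTexture)
        //   {
        //       // Update() returns true while it is still consuming frames for locating.
        //       if (identification.Update(camTexture))
        //           return;
        //   }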
        /// <summary>
        /// Whether capturing has started,
        /// i.e. the two counters, capture and delay, have been initialized (non-zero).
        /// </summary>
        /// <returns></returns>
        public bool isInitLocateScreen() {
            return capture != 0 && delay != 0;
        }
        void DebugImage(List<Texture2D> images)
        {
            QuadrilateralFit(images, 5);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} quadTemp.Count:{quadTemp.Count}");
#endif
            if (quadTemp.Count > 0)
            {
                var quad = quadTemp[0];
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.outputRawImages[4].transform.GetChild(0) as RectTransform,
                    new QuadrilateralInCamera(quad, images[0].Size().o0Vector()));
                // Perspective transform
                // var srcWidth = LocateLightedRedTex.width;
                // var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
                // var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
                // var transformTex = new Texture2D(transformWidth, transformHeight);
                // var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
                // var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
                // var srcPixel = LocateLightedRedTex.GetPixels();
                // Parallel.For(0, transformWidth, (x) =>
                // {
                //     for (int y = 0; y < transformHeight; y++)
                //     {
                //         var index = y * transformWidth + x;
                //         var sampleCoord = pt.TransformRound(x, y);
                //         dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
                //     }
                // });
                // transformTex.SetPixels(dstPixel);
                // transformTex.Apply();
                // //ScreenLocate.DebugTexture(1, transformTex);
                //#if (!NDEBUG && DEBUG && ENABLE_LOG)
                // Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
                //#endif
            }
            //times.Add(watch.ElapsedMilliseconds);
            //UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
        }
        public void NextScreen()
        {
            // For testing only: collapse the sequence to a single full-screen white area
            if (LocateDebug && areaSelected == -1)
            {
                LocateAreaData = new Rect[][] { new Rect[] { new Rect(0, 0, 1f, 1f) } };
                locateIndex = 3;
                areaSelected = 0;
                locateArea.AddRange(LocateAreaData[0]);
            }
            // locateIndex starts at -1
            locateIndex++;
            if (locateIndex < locateArea.Count) // Light the screen areas one after another
            {
                ScreenLocate.SetScreen(locateArea[locateIndex], UnityEngine.Color.white);
                delay = maxDelay;
                capture = maxCapture;
            }
            else // Done: release black/white control of the screen
            {
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                Reset();
            }
        }
        void Reset()
        {
            // bStartLocateScreen = false;
            delay = 0;
            capture = 0;
            ScreenWhiteTexture = null;
            ScreenBlackTexture = null;
            locateIndex = -1;
            areaSelected = -1;
            locateArea.RemoveRange(4, LocateAreaData[0].Length); // drop the areas appended for the selected corner
            quadTemp.Clear();
            sumTemp.Clear();
        }
        public void CaptureBlack(Texture2D cam)
        {
            if (ScreenBlackTexture == null)
                ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            // Each of the maxCapture frames contributes 1/maxCapture, so the accumulated sum is the per-pixel mean.
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }
        public void CaptureWhite(Texture2D cam)
        {
            if (ScreenWhiteTexture == null)
                ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }
        public void CaptureStay(Texture2D cam)
        {
            if (locateIndex == -1) // Screen is fully black
            {
                CaptureBlack(cam);
            }
            else // Part of the screen is white
            {
                CaptureWhite(cam);
            }
        }
        public void CaptureEnd()
        {
            //Debug.Log("locateIndex: " + locateIndex + ", quad: " + quadTemp.Count);
            if (locateIndex == -1)
                return;
            if (locateIndex < 4)
            {
                sumTemp.Add(GetBrightness());
                ScreenWhiteTexture = null;
                // Select the quadrant with the largest brightness difference
                if (locateIndex == 3)
                {
                    areaSelected = sumTemp.MaxIndex();
                    locateArea.AddRange(LocateAreaData[areaSelected]);
                }
            }
            else if (locateIndex >= 4 && locateIndex < locateArea.Count - 1)
            {
                QuadrilateralFit();
                ScreenWhiteTexture = null;
            }
            else
            {
                QuadrilateralFit();
                if (quadTemp.Count != LocateAreaData[0].Length)
                {
                    Debug.Log($"<color=yellow>[ScreenIdentification] Quadrilateral fitting failed, quadTemp.Count: {quadTemp.Count}</color>");
                }
                else if (quadTemp.Count == 1)
                {
                    Screen.QuadInCamera = new QuadrilateralInCamera(quadTemp[0], new Vector(Size.x, Size.y));
                    Debug.Log($"<color=aqua>[ScreenIdentification] Fit succeeded, Quad: {Screen.QuadInCamera.QuadString}____{Screen.QuadInCamera.SizeString}</color>");
                }
                else
                {
                    // Debug.Log($"Fitting quadrilateral 2, quadTemp.Count: {quadTemp.Count}");
                    // Linear fit: extrapolate each vertex from the white-area sizes to the full screen (x == 1)
                    var xValue = new List<float>() { 0 };
                    var predicts = new List<Vector>();
                    foreach (var i in LocateAreaData[0])
                        xValue.Add(i.size.x);
                    Vector baseVertex = Vector.Zero; // The vertex at x == 0
                    {
                        foreach (var q in quadTemp)
                        {
                            baseVertex += q[areaSelected];
                        }
                        baseVertex /= quadTemp.Count;
                    }
                    double rs = 0.0;
                    for (int i = 0; i < 4; i++)
                    {
                        if (i == areaSelected)
                        {
                            predicts.Add(baseVertex);
                        }
                        else
                        {
                            var yValue = new List<Vector>() { baseVertex };
                            foreach (var q in quadTemp)
                            {
                                yValue.Add(q[i]);
                            }
                            var lr = LinerRegression1D.Fit(2, xValue.ToArray(), yValue.ToArray());
                            rs += lr.RSquared / 3;
                            predicts.Add(lr.Predict<Vector>(1));
                        }
                    }
                    Screen.QuadInCamera = new QuadrilateralInCamera(predicts, new Vector(Size.x, Size.y));
                    Debug.Log($"<color=aqua>[ScreenIdentification] Fit succeeded, RSquared: {rs}, Quad: {Screen.QuadInCamera.QuadString}____{Screen.QuadInCamera.SizeString}</color>");
                    //if (rs < 0.8) Screen.Quad = null;
                }
                OnLocateScreenEnd?.Invoke();
            }
        }
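        // Note on the linear fit above: the vertex at the anchored corner (areaSelected) stays put while the
        // white area grows, so it serves as the regression's x == 0 point. For every other vertex, the positions
        // observed at the area sizes in LocateAreaData[0] (x = 0.3, 0.4, 0.5 by default) are fitted against x and
        // extrapolated to x == 1, i.e. the full screen. As a made-up illustration, with the anchored corner near
        // (0, 0) and a vertex observed at roughly (100, 80), (133, 107), (166, 133) for x = 0.3/0.4/0.5, the fit
        // predicts about (333, 267) at x = 1. The averaged RSquared (rs) is a rough confidence measure for that
        // extrapolation.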
        public bool Update(Texture2D cam)
        {
            //if (!bStartLocateScreen) return false;
            if (delay != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                delay--;
                if (delay == 0)
                {
                    ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(cam.width, cam.height); // Record the current resolution
                    Debug.Log("[ScreenIdentification] Sampling texture, resolution: [" + Size.x + ", " + Size.y + "]");
                }
                return true;
            }
            if (capture != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                CaptureStay(cam);
                capture--;
                if (capture == 0)
                {
                    CaptureEnd();
                    NextScreen();
                }
                return true;
            }
            return false;
            #region Old
            /*
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenBlackTexture == null)
                    ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenWhiteTexture == null)
                    ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] -= new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                {
                    ScreenLocate.SetScreen(null);
                    UnityEngine.Color[] newPixel = new UnityEngine.Color[Size.x * Size.y];
                    Parallel.For(0, Size.x * Size.y, i => {
                        var pi = ScreenWhiteTexture[i] /= capture;
                        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    });
                    // Read saved data
                    //{
                    //    var fileName = "3.bin";
                    //    ScreenLocateTexture = $"2023 04 16 厦门测试数据/{fileName}".FileReadByte<Vector<float>[]>();
                    //    Debug.Log($"Read {fileName}");
                    //    Parallel.For(0, Size.x * Size.y, i =>
                    //    {
                    //        var pi = ScreenLocateTexture[i];
                    //        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    //    });
                    //}
                    var ScreenLocateTex = new Texture2D(Size.x, Size.y);
                    ScreenLocateTex.SetPixels(newPixel);
                    ScreenLocateTex.Apply();
                    //ScreenLocate.DebugTexture(2, ScreenLocateTex);
                    var ScreenLocateTexLighted = ScreenLocateTex.AutoLight(10);
                    //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
                    //var FileSavePath = Application.persistentDataPath + "/ScreenLocateTexture.bin";
                    bool Save = ScreenLocate.Main.SaveToggle.isOn;
                    string time;
                    if (Save)
                    {
                        time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
                        var FileSavePath = $"屏幕定位数据{time}.bin";
                        FileSavePath.FileWriteByte(ScreenWhiteTexture);
                        var bytes = ScreenLocateTexLighted.EncodeToPNG();
                        File.WriteAllBytes($"屏幕定位数据{time}.png", bytes);
                        Debug.Log("ScreenLocateTexture Saved To: " + FileSavePath);
                    }
                    var ScreenLocateTexR = ScreenLocateTexLighted.ToRGB(ColorChannel.Red);
                    var ScreenLocateTexG = ScreenLocateTexLighted.ToRGB(ColorChannel.Green);
                    var ScreenLocateTexB = ScreenLocateTexLighted.ToRGB(ColorChannel.Blue);
                    ScreenLocate.DebugTexture(2, ScreenLocateTexR);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
                    var watch = new System.Diagnostics.Stopwatch();
                    watch.Start();
                    var times = new List<double>() { 0.0 };
                    var ScreenLocateTexLightedMat = ScreenLocateTexLighted.Too0Mat();
                    //var ScreenLocateTexLightedMat = texture.Too0Mat();
                    //var (edge, edgeDir) = ScreenLocateTexLightedMat.IdentifyEdge();
                    var (edge, edgeDir) = ScreenLocateTexLightedMat.zimIdentifyEdgeGradientAny(15);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradient().ToTex());
                    //ScreenLocate.DebugTexture(4, edge.ToTex());
                    var quadLines = ScreenLocateTexLightedMat.IdentifyQuadLSD(edge, edgeDir, out List<Line> lightLines, 30);
                    var drawLineMap = new MatrixF2D(edge.Size.x, edge.Size.y);
                    int lineCount = 0;
                    foreach (var l in quadLines)
                    {
                        if (l != null)
                        {
                            o0Extension.DrawLine(drawLineMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                            lineCount++;
                        }
                    }
                    if (lineCount == 4)
                    {
                        var a = quadLines[0].Intersect(quadLines[3], false).Value;
                        var b = quadLines[0].Intersect(quadLines[1], false).Value;
                        var c = quadLines[2].Intersect(quadLines[3], false).Value;
                        var d = quadLines[1].Intersect(quadLines[2], false).Value;
                        Quad = new Quadrilateral(a, b, c, d);
                        if (!Quad.IsInScreen(ScreenLocate.Main.WebCamera.Size))
                            Quad = null;
                    }
                    ScreenLocate.Main.ShowScreen(Quad);
                    //var lines = edge.IdentifyLineLSD(edgeDir, 100);
                    ////var lines = ScreenLocateTexLightedMat.IdentifyLineLSD();
                    //var drawLineMap = new MatrixF2D(edge.Size.x, edge.Size.y);
                    //var returnMaxLines = lines.Sub(0, 10);
                    //foreach (var (line, sum, gradient) in returnMaxLines)
                    //    o0Extension.DrawLine(drawLineMap, line, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                    ScreenLocate.DebugTexture(3, drawLineMap.ToTex());
                    //{
                    //    var bytes = drawLineMap.ToTex().EncodeToPNG();
                    //    File.WriteAllBytes($"屏幕定位数据DrawLineMap.png", bytes);
                    //}
                    times.Add(watch.ElapsedMilliseconds);
                    UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
                    //ScreenLocate.DebugTexture(5, edge.IdentifyLine(edgeDir).ToTex());
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientX().ToTex());
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientY().ToTex());
                    //var convolutionLighted2 = ScreenLocateTexLighted.Too0Mat().IdentifyEdgeVariance().ToTex();
                    // OpenCV processing
                    // zim
                    {
                        //var cvLines = edge.cvHoughLinesP();
                        //ScreenLocate.DebugTexture(5, cvLines);
                        //var myLines = Hough.Transform(edgeMat);
                        //var cvLines = edge.cvLine(myLines);
                        //ScreenLocate.DebugTexture(5, cvLines);
                    }
                    UnityEngine.Object.Destroy(ScreenLocateTex);
                    //ScreenLocate.DebugTexture(4, convolutionLighted2);
                }
                return true;
            }
            /*
            var avg = new Geometry4D.Vector<float>();
            var pixel = texture.GetPixels();
            foreach (var i in pixel.Index())
            {
                var iP = pixel[i];
                avg += new Geometry4D.Vector<float>(iP.r, iP.g, iP.b, iP.a);
            }
            avg /= pixel.Count();
            /*
            var (texLightedR, texLightedG, texLightedB) = ToRGB(newTex);
            ScreenLocate.DebugTexture(3, texLightedR);
            ScreenLocate.DebugTexture(4, texLightedG);
            ScreenLocate.DebugTexture(5, texLightedB);
            //Debug.Log(avg);
            return false;
            /**/
            #endregion
        }
        float GetBrightness()
        {
            UnityEngine.Color[] differPixel = new UnityEngine.Color[Size.x * Size.y];
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
            });
            var sum = 0f;
            foreach (var i in differPixel)
            {
                sum += i.Brightness();
            }
            sum /= differPixel.Length;
            //Debug.Log(sum);
            return sum;
        }
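        // GetBrightness() returns the mean brightness of the white-minus-black difference image; the quarter of
        // the screen that actually lights up in the camera view yields the largest value, which is how
        // CaptureEnd() picks areaSelected.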
        // Converts averaged camera pixels into the texture image used for screen locating
        Texture2D ToLocateTex(UnityEngine.Color[] pixels)
        {
            var ScreenLocateTex = new Texture2D(Size.x, Size.y);
            ScreenLocateTex.SetPixels(pixels);
            ScreenLocateTex.Apply();
            //ScreenLocate.DebugTexture(2, ScreenLocateTex);
            return ScreenLocateTex.AutoLight(10);
            //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
            //var ScreenLocateTexR = ToLocateTex.ToRGB(ColorChannel.Red);
            //var ScreenLocateTexG = ToLocateTex.ToRGB(ColorChannel.Green);
            //var ScreenLocateTexB = ToLocateTex.ToRGB(ColorChannel.Blue);
            //LocateLightedRedTex = ScreenLocateTexR;
            //ScreenLocate.DebugTexture(2, ScreenLocateTexR);
            //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
            //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
            //var ScreenLocateTexLightedMat = texture.Too0Mat();
        }
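        // QuadrilateralFit() below is the core of the detection. In outline (summarizing the code, not adding
        // behavior): build one or more difference images (white capture minus black baseline, contrast-scaled),
        // boost them with AutoLight, extract an edge-gradient map (zimIdentifyEdgeGradientAny), run LSD line
        // detection on it, classify and score the candidate lines per screen edge in FilterLines(), and finally
        // intersect the four chosen lines into a QuadrilateralInCamera that is appended to quadTemp.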
        /// <summary>
        /// Fits the screen quadrilateral from the captured frames (or from debug images).
        /// Produces, as debug textures: ScreenLocateTexture (the image used for line detection),
        /// ChoosableLineTex (the candidate line segments) and ScreenQuadTex (the final result).
        /// </summary>
        /// <param name="debugImages">If not null, run the fit on these images instead of the captured frames (debug mode)</param>
        /// <param name="lineWidth">Width used when drawing the identified screen edges into the result texture</param>
        void QuadrilateralFit(List<Texture2D> debugImages = null, float lineWidth = 10)
        {
            // Black/white screen difference; holds one or more batches of images used for identification. Must not be empty.
            List<UnityEngine.Color[]> PixelsMultipleBatches = new List<UnityEngine.Color[]>();
            // Read the input data
            if (debugImages != null && debugImages.Count != 0)
            {
                foreach (var i in debugImages)
                {
                    Debug.Log($"<color=aqua>Debug {i.name}</color>");
                    PixelsMultipleBatches.Add(i.GetPixels());
                }
            }
            else // Compute the screen difference
            {
                var maxWhite = 0f;
                foreach (var i in ScreenWhiteTexture)
                {
                    var m = i.x > i.y ? (i.x > i.z ? i.x : i.z) : (i.y > i.z ? i.y : i.z);
                    if (maxWhite < m)
                        maxWhite = m;
                }
                var scale = 1.0f / maxWhite; // Stretch the contrast
                var differPixel = new UnityEngine.Color[Size.x * Size.y];
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                    differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z) * scale;
                });
                PixelsMultipleBatches.Add(differPixel);
                PixelsMultipleBatches.Add(ScreenWhiteTexture.Select((i) => new UnityEngine.Color(i.x, i.y, i.z) * scale).ToArray());
            }
            int conSize = (int)Math.Ceiling(0.007f * Size.y) * 2 + 1;
            conSize = Math.Max(conSize, 7); // Minimum convolution kernel size is 7
            float minLength = conSize * 7.7f;
            minLength = locateIndex == -1 ? minLength : minLength * areaPercent; // minLength has to shrink with the lit area
            string log = $"[ScreenLocate Auto] Size: ({Size.x},{Size.y}), convolution kernel size: {conSize}, minimum line length: {minLength}";
            var allLines = new List<LineIdentified>();
            Texture2D ScreenLocateTexture = null;
            List<Texture2D> LocateTexTemp = new List<Texture2D>();
            List<Matrix> ScreenLocateMatList = new List<Matrix>();
            foreach (var batch in PixelsMultipleBatches.Index())
            {
                var locateTex = ToLocateTex(PixelsMultipleBatches[batch]);
                LocateTexTemp.Add(locateTex);
                var ScreenLocateMat = locateTex.Too0Mat(); // Matrix used to extract the lines
                var lineCount = ZIMIdentifyQuadLSD(ref allLines, batch, ScreenLocateMat.zimIdentifyEdgeGradientAny(conSize));
                log += $"\r\nImage {batch}: number of identified line segments: {lineCount}";
                ScreenLocateMatList.Add(ScreenLocateMat);
            }
            ScreenLocateTexture = LocateTexTemp[0];
            // Filter down to the four edges of the quadrilateral
            var quadLines = FilterLines(ScreenLocateMatList, allLines, GetAvgPoint(ScreenLocateMatList[0]),
                out Line[] oldLines, out List<Line> possibleLines,
                Screen, conSize, conSize, minLength);
            // Render allLines into a debug image
            var allLinesMap = new Matrix(Size, Tiling: true);
            foreach (var l in allLines)
            {
                if (l.Line != null)
                    o0Extension.DrawLine(allLinesMap, l.Line, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true);
            }
            var allLinesTex = allLinesMap.ToTexRGBA(FloatValueToColor);
            ScreenLocate.DebugTexture(1, allLinesTex);
            // Draw the identified edges and check whether they form a screen; if so, set the ScreenMap
            // Edge order: bottom, right, top, left
            List<Line> LineIdentified = new List<Line>();
            for (int i = 0; i < 4; i++)
            {
                if (quadLines[i] != null)
                    LineIdentified.Add(quadLines[i]);
                else if (oldLines != null)
                    LineIdentified.Add(oldLines[i]);
            }
            var drawScreenMap = new Matrix(Size, Tiling: true);
            foreach (var l in LineIdentified)
                o0Extension.DrawLine(drawScreenMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, lineWidth));
            Texture2D ScreenQuadTex = drawScreenMap.ToTex(); // out ScreenQuadTex
            QuadrilateralInCamera screenQuad = null;
            if (LineIdentified.Count == 4)
            {
                var a = LineIdentified[0].Intersect(LineIdentified[3], false).Value;
                var b = LineIdentified[0].Intersect(LineIdentified[1], false).Value;
                var c = LineIdentified[2].Intersect(LineIdentified[3], false).Value;
                var d = LineIdentified[1].Intersect(LineIdentified[2], false).Value;
                screenQuad = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
                if (!screenQuad.IsQuadComplete())
                    screenQuad = null;
            }
            if (screenQuad == null && Screen.QuadInCamera != null) // If possible, fall back to the previous screen
            {
                Debug.Log("<color=aqua>[ScreenIdentification] This pass failed; falling back to the previous result</color>");
                quadTemp.Add(Screen.QuadInCamera.Quad);
            }
            else if (screenQuad != null)
            {
                Debug.Log("<color=aqua>[ScreenIdentification] Quadrilateral identified</color>");
                quadTemp.Add(screenQuad.Quad);
            }
            // Also output an image of the identification result, including the distracting line segments
            var LSDLineMap = new Matrix(Size, Tiling: true);
            foreach (var l in possibleLines)
            {
                if (l != null && !quadLines.Contains(l))
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true); // Remaining candidate segments
            }
            foreach (var l in quadLines)
            {
                if (l != null)
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 2, new Geometry2D.Float.Vector(0, 4)); // Segments identified in this pass
            }
            if (oldLines != null)
            {
                foreach (var l in oldLines)
                    o0Extension.DrawLine(LSDLineMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true); // Old screen edges (e.g. from a previous manual identification)
            }
            Texture2D ChoosableLineTex = LSDLineMap.ToTexRGBA(FloatValueToColor);
            Debug.Log(log);
            // Optionally save the images locally
            if (ScreenLocate.Main.SaveToggle.isOn && ScreenLocate.Main.DebugOnZIMDemo)
            {
                var FileDirectory = $"Debug_屏幕定位/";
                SaveImages(FileDirectory, log, ScreenLocateTexture, allLinesTex, ScreenQuadTex);
            }
            //times.Add(watch.ElapsedMilliseconds);
            //UnityEngine.Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
            // OpenCV processing, zim
            {
                //var cvLines = edge.cvHoughLinesP();
                //ScreenLocate.DebugTexture(5, cvLines);
                //var myLines = Hough.Transform(edgeMat);
                //var cvLines = edge.cvLine(myLines);
                //ScreenLocate.DebugTexture(5, cvLines);
            }
            {
                ScreenLocate.DebugTexture(2, ScreenLocateTexture);
                ScreenLocate.DebugTexture(3, ScreenQuadTex);
                // Blend the identified lines with the original image
                ScreenLocate.DebugTexture(4, ScreenLocateTexture.Merge(ScreenQuadTex));
                ScreenLocate.DebugTexture(5, ChoosableLineTex);
            }
            foreach (var i in LocateTexTemp)
            {
                if (i != ScreenLocateTexture) // ScreenLocateTexture is released by ScreenLocate.DebugTexture
                    GameObject.Destroy(i);
            }
        }
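        // GetAvgPoint() computes the brightness-weighted centroid of the locate matrix. Because the lit screen
        // area dominates the difference image, this point falls inside the screen and is used by FilterLines()
        // as the interior reference point when classifying candidate lines as bottom/right/top/left.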
        Vector GetAvgPoint(Matrix screenLocateMat)
        {
            // Weighted average, accumulated per column and then combined
            Vector[] avgPointsColumn = new Vector[screenLocateMat.Size.x];
            float[] valueSumsColumn = new float[screenLocateMat.Size.x];
            Parallel.For(0, screenLocateMat.Size.x, i =>
            {
                for (int j = 0; j < screenLocateMat.Size.y; j++)
                {
                    var value = screenLocateMat[i, j];
                    valueSumsColumn[i] += value;
                    avgPointsColumn[i] += new Vector(i, j) * value;
                }
            });
            Vector avgPoint = Vector.Zero;
            var valueSum = 0f;
            for (int i = 0; i < screenLocateMat.Size.x; i++)
            {
                avgPoint += avgPointsColumn[i];
                valueSum += valueSumsColumn[i];
            }
            avgPoint /= valueSum;
            return avgPoint;
        }
        // Returns the number of line segments found; 0 means the search failed
        int ZIMIdentifyQuadLSD(ref List<LineIdentified> allLines, int batch, (Matrix edgeMat, Matrix edgeDirMat) edgeGradient,
            float minLength = 100)
        {
            var l = edgeGradient.edgeMat.IdentifyLineLSD(edgeGradient.edgeDirMat, minLength, 20, LineCaptureSize: new Vector(0, 5));
            if (l == null || l.Count == 0)
                return 0;
            allLines.AddRange(l.Select((i) => new LineIdentified(batch, i)));
            return l.Count;
        }
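        // FilterLines() below scores every candidate line with estimateGradient(): the mean brightness step
        // measured across the line (sampled every 2 px along it), multiplied by a length factor
        // sqrt(max(1, length / minLength / 3)) that favours longer lines, and by a distance factor
        // (3 - distanceRatio) / 2 that favours lines close to the previously known screen edge. The highest
        // scoring line per side (bottom, right, top, left) wins.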
        // Returns the four edges of the quadrilateral. The list always has length 4 (an entry is null if that
        // edge was not identified) and the order is: bottom, right, top, left.
        List<Line> FilterLines(List<Matrix> screenLocateMatList, List<LineIdentified> allLines, Vector avgPoint,
            out Line[] oldLines, out List<Line> possibleLines,
            ScreenMap screen, float conSize, float gradientLength, float minLength = 100)
        {
            //Debug.Log("[IdentifyLineLSD] lines.Count: " + lines.Count);
            // The matrix produced by LSD is smaller (because of the convolution), so the lines must be shifted back
            var offset = new Vector((conSize - 1) / 2, (conSize - 1) / 2);
            for (int i = 0; i < allLines.Count; i++)
                allLines[i].Offset(offset);
            // Combined gradient along the line (gradient times a length factor times a distance factor);
            // distanceRatio is the actual distance divided by the maximum allowed distance
            float estimateGradient(LineIdentified line, float distanceRatio)
            {
                var dir = (line.Line.B - line.Line.A).Normalized;
                var vertical = new Vector(-dir.y, dir.x) * (gradientLength / 2);
                var step = 2;
                var ll = line.Line.Length;
                var lg = new List<float>();
                for (int i = 0; i <= ll; i += step)
                {
                    var point = line.Line.A + dir * i;
                    var ga = point + vertical;
                    var gb = point - vertical;
                    lg.Add(screenLocateMatList[line.Batch][(int)ga.x, (int)ga.y] - screenLocateMatList[line.Batch][(int)gb.x, (int)gb.y]);
                }
                float e = (float)Math.Sqrt(Math.Max(1, line.Line.Length / minLength / 3)); // Length factor: lines with larger gradients and greater length score higher
                float d = (3 - distanceRatio) / 2; // Distance factor: the closer the line, the larger the factor
                return e * d * Math.Abs(lg.Mean());
            }
            // bottom, right, top, left
            var quadLines = new List<(float, Line)>[4] { new List<(float, Line)>(), new List<(float, Line)>(), new List<(float, Line)>(), new List<(float, Line)>() };
            possibleLines = new List<Line>();
            oldLines = null;
            // If locating data already exists, filter the lines against it
            if (screen.QuadInCamera != null)
            {
                Debug.Log("[IdentifyLineLSD] Filtering against existing locating data");
                screen.RefreshCameraSize(new Vector2(Size.x, Size.y));
                var calibration = ScreenLocate.Main.ReDoLocateCalibrationRatio * Size.y;
                oldLines = screen.QuadInCamera.GetLines();
                var pedals = oldLines.Select((i) => o0Extension.PointPedal(i, avgPoint)).ToArray(); // Feet of perpendicular from avgPoint to the current edges: bottom, right, top, left
                foreach (var i in allLines)
                {
                    float minDistance = float.MaxValue;
                    int index = -1;
                    foreach (var j in pedals.Index())
                    {
                        var d = (o0Extension.PointPedal(i.Line, avgPoint) - pedals[j]).Length;
                        if (d < minDistance)
                        {
                            minDistance = d;
                            index = j;
                        }
                    }
                    //Debug.Log(minDistance + ", -----------" + calibration);
                    if (minDistance < calibration) // The perpendicular feet are close enough
                    {
                        quadLines[index].Add((estimateGradient(i, minDistance / calibration), i.Line));
                        possibleLines.Add(i.Line);
                    }
                }
            }
            else
            {
                var avaAngleHalf = 75f;
                foreach (var line in allLines)
                {
                    possibleLines.Add(line.Line);
                    var a = (avgPoint - (line.Line.A + line.Line.B) / 2).DegreeToXAxis();
                    //Debug.Log(a + ", " + gradient + ", " + sum);
                    int index = -1;
                    if (Math.Abs(a - line.GradientDegree) < avaAngleHalf || Math.Abs(a - 360 - line.GradientDegree) < avaAngleHalf || Math.Abs(a + 360 - line.GradientDegree) < avaAngleHalf)
                    {
                        if (line.GradientDegree > 45 && line.GradientDegree < 135) // Bottom
                            index = 0;
                        else if (line.GradientDegree > 135 && line.GradientDegree < 225) // Right
                            index = 1;
                        else if (line.GradientDegree > 225 && line.GradientDegree < 315) // Top
                            index = 2;
                        else // Left
                            index = 3;
                        //var g = Math.Abs(lg.Mean());
                        //Debug.Log(gradient + ", " + g);
                        //List<float> lp1 = new List<float>(), lp2 = new List<float>(); // Values on either side of the line
                        //for (float i = 0; i <= ll; i += step)
                        //{
                        //    var point = line.A + dir * i;
                        //    var ga = point + vertical;
                        //    var gb = point - vertical;
                        //    lp1.Add(screenLocateMat[(int)ga.x, (int)ga.y]);
                        //    lp2.Add(screenLocateMat[(int)gb.x, (int)gb.y]);
                        //}
                        //var avg1 = lp1.Mean();
                        //var avg2 = lp2.Mean();
                        //var v1 = lp1.Variance();
                        //var v2 = lp2.Variance();
                        //var lineGradient = Math.Abs(avg1 - avg2) / (v1 + v2 + 0.2f); // The smaller the variance, the more valuable the gradient
                        ////var g = Math.Abs(lg.Mean());
                        ////Debug.Log(gradient + ", " + g);
                        //Debug.Log(v1 + ", " + v2 + ", " + lineGradient);
                        //quadLines[index].Add((lineGradient, line));
                        quadLines[index].Add((estimateGradient(line, 1), line.Line));
                    }
                }
            }
            var result = new Line[4];
            for (int i = 0; i < 4; i++)
            {
                if (quadLines[i].Count > 0)
                    result[i] = quadLines[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
            }
            return result.ToList();
        }
        void SaveImages(string FileDirectory, string log, Texture2D ScreenLocateTex, Texture2D allLinesTex, Texture2D ScreenQuadTex)
        {
            if (!Directory.Exists(FileDirectory))
                Directory.CreateDirectory(FileDirectory);
            var time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
            var pngData = (ScreenLocate.Main.outputTexture2D[7] as Texture2D)?.EncodeToPNG();
            if (pngData != null)
                File.WriteAllBytes($"{FileDirectory}{time}A屏幕原图.png", pngData);
            var pngData1 = ScreenLocateTex.EncodeToPNG();
            if (pngData1 != null)
                File.WriteAllBytes($"{FileDirectory}{time}B黑白色差.png", pngData1);
            var pngData2 = allLinesTex.EncodeToPNG();
            if (pngData2 != null)
                File.WriteAllBytes($"{FileDirectory}{time}C全部识别线段.png", pngData2);
            var pngData3 = ScreenQuadTex.EncodeToPNG();
            if (pngData3 != null)
                File.WriteAllBytes($"{FileDirectory}{time}D识别结果.png", pngData3);
            Debug.Log($"<color=aqua>({time}) Screen identification images saved to: <application root>/{FileDirectory}</color>");
            log += $"\r\nOriginal screen image saved: {pngData != null}, \r\nBlack/white difference saved: {pngData1 != null}, \r\nAll identified lines saved: {pngData2 != null}, \r\nIdentification result saved: {pngData3 != null}";
            File.WriteAllText($"{FileDirectory}{time}屏幕自动定位_日志.log", log);
        }
    }
}