// ScreenIdentification.cs

#define ENABLE_LOG
using o0.Geometry2D.Float;
using o0.Num;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using UnityEngine;
using UnityEngine.UIElements;
using UnityStandardAssets.Utility;
using ZIM;
using ZIM.Unity;
namespace o0.Project
{
    public partial class ScreenIdentification
    {
        private const string TAG = "ScreenIdentification#";

        // LocateAreaData describes the regions whose screen color difference is sampled; there may be several
        // passes per corner. Setting LocateSingleStep switches to identifying the color difference only once.
        static Rect[][] LocateAreaData = new Rect[][] {
            new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f) }
        };
        //static Rect[][] LocateAreaData = new Rect[][] {
        //    new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0.4f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0f, 0.4f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f), new Rect(0.4f, 0.4f, 0.6f, 0.6f) }
        //};
        //static bool LocateSingleStep = false;
        static bool LocateSingleStep = true;

        public Geometry2D.Vector<int> Size => ScreenLocate.Main.CameraSize;
        public QuadrilateralInCamera QuadManual;
        public QuadrilateralInCamera QuadAuto; // Fully automatic result; the user may pick it (takes effect when assigned to Screen.QuadInCamera)
        public QuadrilateralInCamera QuadSemiAuto; // Semi-automatic result; the user may pick it (takes effect when assigned to Screen.QuadInCamera)
        public ScreenMap Screen; // The identified screen, used for the perspective transform

        int capture = 0;
        int delay = 0;
        int maxCapture;
        int maxDelay;
        Geometry.Vector<float>[] ScreenBlackTexture;
        Geometry.Vector<float>[] ScreenWhiteTexture;
        int locateIndex = -1;
        readonly List<Rect> locateArea = new List<Rect> {
            new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0.5f, 0.5f, 0.5f, 0.5f)
        }; // Regions of the screen that are lit white
        float areaPercent => locateArea[locateIndex].size.x; // Proportion of the current white region
        int areaSelected = -1; // Which region is selected; order matches the Quadrilateral
        readonly List<float> sumTemp = new List<float>();
        readonly List<QuadrilateralInCamera> quadTemp = new List<QuadrilateralInCamera>();

        //public ScreenIdentification(WebCamTexture texture)
        //{
        //    Size = new Geometry2D.Vector<int>(texture.width, texture.height);
        //    Screen = new ScreenMap();
        //}

        public static UnityEngine.Color FloatValueToColor(float i)
        {
            return i switch
            {
                1 => UnityEngine.Color.yellow,
                2 => new UnityEngine.Color(0, 1, 1, 1),
                3 => UnityEngine.Color.green,
                4 => UnityEngine.Color.white,
                5 => UnityEngine.Color.red,
                _ => UnityEngine.Color.black,
            };
        }
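        // Legend note (derived from the drawing code further below): when the debug line maps are rasterized and
        // converted with ToTexRGBA(FloatValueToColor), value 1 renders candidate/all lines in yellow, 3 renders old
        // or manual fallback lines in green, 4 renders the fully automatic result in white, and 5 renders the
        // semi-automatic result in red.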
        public ScreenIdentification()
        {
            Screen = new ScreenMap();
            //OnLocateScreenEnter += () => Application.targetFrameRate = 30; // Lock the frame rate during identification so the camera captures the expected frames
            //OnLocateScreenEnd += () => Application.targetFrameRate = 60;
        }

        public void SetScreenQuad(QuadrilateralInCamera quad) => Screen.QuadInCamera = quad;

        // Result of the last semi-automatic identification; false means that edge failed. Edge order: bottom, right, top, left.
        public bool[] LastQuadSemiAutoState;

        public event Action OnLocateScreenEnter;
        public event Action OnLocateScreenEnd;

        public bool bStartLocateScreen { get; set; } = false; // Whether capturing is in progress

        public bool SelectScreenAfterLocate(ScreenLocate.ScreenIdentificationTag tag)
        {
            QuadrilateralInCamera target = tag switch
            {
                ScreenLocate.ScreenIdentificationTag.Manual => QuadManual,
                ScreenLocate.ScreenIdentificationTag.SemiAuto => QuadSemiAuto,
                ScreenLocate.ScreenIdentificationTag.Auto => QuadAuto,
                _ => null
            };
            if (target == null)
                return false;
            Debug.Log($"<color=aqua>[ScreenIdentification] Selected the identified screen ({Enum.GetName(typeof(ScreenLocate.ScreenIdentificationTag), tag)}), {target}</color>");
            SetScreenQuad(target);
            return true;
        }

        // Entry point that starts automatic identification
        public void LocateScreen(int Capture = 30, int Delay = 30) // values are in frames
        {
            if (ScreenLocate.Main.DebugScreenImages.Count != 0 && ScreenLocate.Main.DebugOnZIMDemo) // This branch is only for testing with images
            {
                ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(ScreenLocate.Main.DebugScreenImages[0].width, ScreenLocate.Main.DebugScreenImages[0].height);
                DebugImage(ScreenLocate.Main.DebugScreenImages);
                Screen.QuadInCamera = quadTemp[0];
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                delay = 0;
                capture = 0;
                ScreenWhiteTexture = null;
                ScreenBlackTexture = null;
                locateIndex = -1;
                areaSelected = -1;
                quadTemp.Clear();
                sumTemp.Clear();
                //ScreenLocate.Main.DebugScreenImages.Clear();
                return;
            }
            delay = Math.Max(Delay, 5);
            capture = Math.Max(Capture, 5);
            maxDelay = Delay;
            maxCapture = Capture;
            ScreenLocate.SetScreen(new Rect(0f, 0f, 1f, 1f), UnityEngine.Color.black);
            //ScreenLocate.SetScreen(new Rect(0f, 0f, 0.6f, 0.6f), UnityEngine.Color.white);
            //bStartLocateScreen = false;
            ScreenWhiteTexture = null;
            ScreenBlackTexture = null;
            OnLocateScreenEnter?.Invoke();
        }
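        // A minimal driving-loop sketch (assumptions: ScreenLocate.Main is already set up, and some MonoBehaviour
        // owns this instance and can supply a Texture2D camera frame each frame, here called `cameraFrame`; that
        // name is hypothetical). It only illustrates the call order implied by this class: LocateScreen() arms the
        // delay/capture counters, and Update() is then fed one camera frame per frame until it returns false.
        //
        //   var identification = new ScreenIdentification();
        //   identification.OnLocateScreenEnd += () => Debug.Log(identification.Screen.QuadInCamera);
        //   identification.LocateScreen(Capture: 30, Delay: 30);
        //
        //   void Update() // per-frame callback of the owning MonoBehaviour
        //   {
        //       bool stillLocating = identification.Update(cameraFrame); // false once the counters have run out
        //   }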
        /// <summary>
        /// Whether capture has started,
        /// i.e. both counters, capture and delay, have been initialized.
        /// </summary>
        /// <returns></returns>
        public bool isInitLocateScreen()
        {
            return capture != 0 && delay != 0;
        }

        void DebugImage(List<Texture2D> images)
        {
            QuadrilateralFit(images);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} quadTemp.Count:{quadTemp.Count}");
#endif
            if (quadTemp.Count > 0)
            {
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.outputRawImages[4].transform.GetChild(0) as RectTransform, quadTemp[0]);
                // Perspective transform
                // var srcWidth = LocateLightedRedTex.width;
                // var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
                // var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
                // var transformTex = new Texture2D(transformWidth, transformHeight);
                // var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
                // var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
                // var srcPixel = LocateLightedRedTex.GetPixels();
                // Parallel.For(0, transformWidth, (x) =>
                // {
                //     for (int y = 0; y < transformHeight; y++)
                //     {
                //         var index = y * transformWidth + x;
                //         var sampleCoord = pt.TransformRound(x, y);
                //         dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
                //     }
                // });
                // transformTex.SetPixels(dstPixel);
                // transformTex.Apply();
                // //ScreenLocate.DebugTexture(1, transformTex);
//#if (!NDEBUG && DEBUG && ENABLE_LOG)
                // Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
//#endif
            }
            //times.Add(watch.ElapsedMilliseconds);
            //Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
        }

        public void NextScreen()
        {
            // Identify only a single color-difference step
            if (LocateSingleStep && areaSelected == -1)
            {
                LocateAreaData = new Rect[][] { new Rect[] { new Rect(0, 0, 1f, 1f) } };
                locateIndex = 3;
                areaSelected = 0;
                locateArea.AddRange(LocateAreaData[0]);
            }
            // index starts at -1
            locateIndex++;
            if (locateIndex < locateArea.Count) // Light up the screen regions one by one
            {
                ScreenLocate.SetScreen(locateArea[locateIndex], UnityEngine.Color.white);
                delay = maxDelay;
                capture = maxCapture;
            }
            else // Stop driving the screen black/white
            {
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                Reset();
            }
        }
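        // Sequence note (restating the counters above): locateIndex == -1 samples the all-black screen; indices 0..3
        // light each quadrant of locateArea in turn so CaptureEnd can pick the region with the largest brightness
        // difference; the indices that follow replay LocateAreaData[areaSelected] (or the single full-screen rect
        // when LocateSingleStep is set) before the screen control is released and Reset() is called.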
        // Clear the recorded screen identification data (manual, automatic, etc.)
        public void ClearQuadCache()
        {
            SetScreenQuad(null);
            QuadManual = null;
            QuadSemiAuto = null;
            QuadAuto = null;
        }

        public void Reset()
        {
            // bStartLocateScreen = false;
            delay = 0;
            capture = 0;
            ScreenWhiteTexture = null;
            ScreenBlackTexture = null;
            locateIndex = -1;
            areaSelected = -1;
            if (locateArea.Count > 4)
                locateArea.RemoveRange(4, LocateAreaData[0].Length);
            quadTemp.Clear();
            sumTemp.Clear();
        }

        public void CaptureBlack(Texture2D cam)
        {
            if (ScreenBlackTexture == null)
                ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }

        public void CaptureWhite(Texture2D cam)
        {
            if (ScreenWhiteTexture == null)
                ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }

        public void CaptureStay(Texture2D cam)
        {
            if (locateIndex == -1) // Screen is black
            {
                CaptureBlack(cam);
            }
            else // Part of the screen is white
            {
                CaptureWhite(cam);
            }
        }

        public void CaptureEnd()
        {
            //Debug.Log("locateIndex: " + locateIndex + ", quad: " + quadTemp.Count);
            if (locateIndex == -1)
                return;
            if (locateIndex < 4)
            {
                sumTemp.Add(GetBrightness());
                ScreenWhiteTexture = null;
                // Pick the region with the largest brightness difference
                if (locateIndex == 3)
                {
                    areaSelected = sumTemp.MaxIndex();
                    locateArea.AddRange(LocateAreaData[areaSelected]);
                }
            }
            else if (locateIndex >= 4 && locateIndex < locateArea.Count - 1)
            {
                QuadrilateralFit();
                ScreenWhiteTexture = null;
            }
            else
            {
                QuadrilateralFit();
                if (quadTemp.Count != LocateAreaData[0].Length)
                {
                    Debug.Log($"<color=yellow>[ScreenIdentification] Quadrilateral fit failed, quadTemp.Count: {quadTemp.Count}</color>");
                }
                else if (quadTemp.Count == 1)
                {
                    SetScreenQuad(quadTemp[0]);
                    Debug.Log($"[ScreenIdentification] Fit succeeded, identified data: {Screen.QuadInCamera}");
                }
                else
                {
                    // Debug.Log($"Quadrilateral fit 2, quadTemp.Count: {quadTemp.Count}");
                    // Linear fit
                    var xValue = new List<float>() { 0 };
                    var predicts = new List<Vector>();
                    foreach (var i in LocateAreaData[0])
                        xValue.Add(i.size.x);
                    Vector baseVertex = Vector.Zero; // the point at x == 0
                    {
                        foreach (var q in quadTemp)
                        {
                            baseVertex += q.Quad[areaSelected];
                        }
                        baseVertex /= quadTemp.Count;
                    }
                    double rs = 0.0;
                    for (int i = 0; i < 4; i++)
                    {
                        if (i == areaSelected)
                        {
                            predicts.Add(baseVertex);
                        }
                        else
                        {
                            var yValue = new List<Vector>() { baseVertex };
                            foreach (var q in quadTemp)
                            {
                                yValue.Add(q.Quad[i]);
                            }
                            var lr = LinerRegression1D.Fit(2, xValue.ToArray(), yValue.ToArray());
                            rs += lr.RSquared / 3;
                            predicts.Add(lr.Predict<Vector>(1));
                        }
                    }
                    SetScreenQuad(new QuadrilateralInCamera(predicts, new Vector(Size.x, Size.y)));
                    Debug.Log($"[ScreenIdentification] Fit succeeded, RSquared: {rs}, Quad: {Screen.QuadInCamera}");
                    //if (rs < 0.8) Screen.Quad = null;
                }
                OnLocateScreenEnd?.Invoke();
            }
        }
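        // How the linear fit above extrapolates (a reading of the code, not additional behavior): each pass in
        // LocateAreaData[areaSelected] lights a rectangle anchored at the selected corner with relative size x
        // (e.g. 0.3, 0.4, 0.5), and QuadrilateralFit records one quad per pass. The anchored corner barely moves,
        // so its positions are averaged (baseVertex, treated as the sample at x == 0); every other corner is
        // regressed against x with LinerRegression1D and evaluated at x == 1, which predicts where that corner
        // would sit if the whole screen were lit.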
        public bool Update(Texture2D cam)
        {
            //if (!bStartLocateScreen) return false;
            if (delay != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                delay--;
                if (delay == 0)
                {
                    ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(cam.width, cam.height); // Record the current resolution
                    Debug.Log("[ScreenIdentification] Sampling texture, recording sample resolution: [" + Size.x + ", " + Size.y + "]");
                }
                return true;
            }
            if (capture != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                CaptureStay(cam);
                capture--;
                if (capture == 0)
                {
                    CaptureEnd();
                    NextScreen();
                }
                return true;
            }
            return false;
            #region Old
            /*
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenBlackTexture == null)
                    ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenWhiteTexture == null)
                    ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] -= new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                {
                    ScreenLocate.SetScreen(null);
                    UnityEngine.Color[] newPixel = new UnityEngine.Color[Size.x * Size.y];
                    Parallel.For(0, Size.x * Size.y, i => {
                        var pi = ScreenWhiteTexture[i] /= capture;
                        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    });
                    // Read data
                    //{
                    //    var fileName = "3.bin";
                    //    ScreenLocateTexture = $"2023 04 16 厦门测试数据/{fileName}".FileReadByte<Vector<float>[]>();
                    //    Debug.Log($"Read {fileName}");
                    //    Parallel.For(0, Size.x * Size.y, i =>
                    //    {
                    //        var pi = ScreenLocateTexture[i];
                    //        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    //    });
                    //}
                    var ScreenLocateTex = new Texture2D(Size.x, Size.y);
                    ScreenLocateTex.SetPixels(newPixel);
                    ScreenLocateTex.Apply();
                    //ScreenLocate.DebugTexture(2, ScreenLocateTex);
                    var ScreenLocateTexLighted = ScreenLocateTex.AutoLight(10);
                    //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
                    //var FileSavePath = Application.persistentDataPath + "/ScreenLocateTexture.bin";
                    bool Save = ScreenLocate.Main.SaveToggle.isOn;
                    string time;
                    if (Save)
                    {
                        time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
                        var FileSavePath = $"屏幕定位数据{time}.bin";
                        FileSavePath.FileWriteByte(ScreenWhiteTexture);
                        var bytes = ScreenLocateTexLighted.EncodeToPNG();
                        File.WriteAllBytes($"屏幕定位数据{time}.png", bytes);
                        Debug.Log("ScreenLocateTexture Saved To: " + FileSavePath);
                    }
                    var ScreenLocateTexR = ScreenLocateTexLighted.ToRGB(ColorChannel.Red);
                    var ScreenLocateTexG = ScreenLocateTexLighted.ToRGB(ColorChannel.Green);
                    var ScreenLocateTexB = ScreenLocateTexLighted.ToRGB(ColorChannel.Blue);
                    ScreenLocate.DebugTexture(2, ScreenLocateTexR);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
                    var watch = new System.Diagnostics.Stopwatch();
                    watch.Start();
                    var times = new List<double>() { 0.0 };
                    var ScreenLocateTexLightedMat = ScreenLocateTexLighted.Too0Mat();
                    //var ScreenLocateTexLightedMat = texture.Too0Mat();
                    //var (edge, edgeDir) = ScreenLocateTexLightedMat.IdentifyEdge();
                    var (edge, edgeDir) = ScreenLocateTexLightedMat.zimIdentifyEdgeGradientAny(15);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradient().ToTex());
                    //ScreenLocate.DebugTexture(4, edge.ToTex());
                    var quadLines = ScreenLocateTexLightedMat.IdentifyQuadLSD(edge, edgeDir, out List<Line> lightLines, 30);
                    var drawLineMap = new MatrixF2D(edge..Size.x, edge.Size.y);
                    int lineCount = 0;
                    foreach (var l in quadLines)
                    {
                        if (l != null)
                        {
                            o0Extension.DrawLine(drawLineMap.DrawLine(l, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                            lineCount++;
                        }
                    }
                    if (lineCount == 4)
                    {
                        var a = quadLines[0].Intersect(quadLines[3], false).Value;
                        var b = quadLines[0].Intersect(quadLines[1], false).Value;
                        var c = quadLines[2].Intersect(quadLines[3], false).Value;
                        var d = quadLines[1].Intersect(quadLines[2], false).Value;
                        Quad = new Quadrilateral(a, b, c, d);
                        if (!Quad.IsInScreen(ScreenLocate.Main.WebCamera.Size))
                            Quad = null;
                    }
                    ScreenLocate.Main.ShowScreen(Quad);
                    //var lines = edge.IdentifyLineLSD(edgeDir, 100);
                    ////var lines = ScreenLocateTexLightedMat.IdentifyLineLSD();
                    //var drawLineMap = new MatrixF2D(edge..Size.x, edge.Size.y);
                    //var returnMaxLines = lines.Sub(0, 10);
                    //foreach (var (line, sum, gradient) in returnMaxLines)
                    //    o0Extension.DrawLine(drawLineMap.DrawLine(line, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                    ScreenLocate.DebugTexture(3, drawLineMap.ToTex());
                    //{
                    //    var bytes = drawLineMap.ToTex().EncodeToPNG();
                    //    File.WriteAllBytes($"屏幕定位数据DrawLineMap.png", bytes);
                    //}
                    times.Add(watch.ElapsedMilliseconds);
                    Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
                    //ScreenLocate.DebugTexture(5, edge.IdentifyLine(edgeDir).ToTex());
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientX().ToTex());
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientY().ToTex());
                    //var convolutionLighted2 = ScreenLocateTexLighted.Too0Mat().IdentifyEdgeVariance().ToTex();
                    // OpenCV processing
                    // zim
                    {
                        //var cvLines = edge.cvHoughLinesP();
                        //ScreenLocate.DebugTexture(5, cvLines);
                        //var myLines = Hough.Transform(edgeMat);
                        //var cvLines = edge.cvLine(myLines);
                        //ScreenLocate.DebugTexture(5, cvLines);
                    }
                    UnityEngine.Object.Destroy(ScreenLocateTex);
                    //ScreenLocate.DebugTexture(4, convolutionLighted2);
                }
                return true;
            }
            /*
            var avg = new Geometry4D.Vector<float>();
            var pixel = texture.GetPixels();
            foreach (var i in pixel.Index())
            {
                var iP = pixel[i];
                avg += new Geometry4D.Vector<float>(iP.r, iP.g, iP.b, iP.a);
            }
            avg /= pixel.Count();
            /*
            var (texLightedR, texLightedG, texLightedB) = ToRGB(newTex);
            ScreenLocate.DebugTexture(3, texLightedR);
            ScreenLocate.DebugTexture(4, texLightedG);
            ScreenLocate.DebugTexture(5, texLightedB);
            //Debug.Log(avg);
            return false;
            /**/
            #endregion
        }
        float GetBrightness()
        {
            UnityEngine.Color[] differPixel = new UnityEngine.Color[Size.x * Size.y];
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
            });
            var sum = 0f;
            foreach (var i in differPixel)
            {
                sum += i.Brightness();
            }
            sum /= differPixel.Length;
            //Debug.Log(sum);
            return sum;
        }
        // Convert to the texture image needed for screen locating
        Texture2D ToLocateTex(UnityEngine.Color[] pixels)
        {
            var ScreenLocateTex = new Texture2D(Size.x, Size.y);
            ScreenLocateTex.SetPixels(pixels);
            ScreenLocateTex.Apply();
            //ScreenLocate.DebugTexture(2, ScreenLocateTex);
            return ScreenLocateTex.AutoLight(10);
            //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
            //var ScreenLocateTexR = ToLocateTex.ToRGB(ColorChannel.Red);
            //var ScreenLocateTexG = ToLocateTex.ToRGB(ColorChannel.Green);
            //var ScreenLocateTexB = ToLocateTex.ToRGB(ColorChannel.Blue);
            //LocateLightedRedTex = ScreenLocateTexR;
            //ScreenLocate.DebugTexture(2, ScreenLocateTexR);
            //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
            //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
            //var ScreenLocateTexLightedMat = texture.Too0Mat();
        }
        /// <param name="debugImages">If this is non-null and non-empty, run the debug path.</param>
        void QuadrilateralFit(List<Texture2D> debugImages = null)
        {
            // Black/white screen differences; holds multiple batches of images for identification. This list must not be empty.
            List<UnityEngine.Color[]> PixelsMultipleBatches = new List<UnityEngine.Color[]>();
            //var sw = new System.Diagnostics.Stopwatch();
            //sw.Start();
            // Read data
            int batchCount;
            if (debugImages != null && debugImages.Count != 0)
            {
                batchCount = debugImages.Count;
                var dSize = debugImages.First().Size();
                foreach (var i in debugImages)
                {
                    Debug.Log($"<color=aqua>Debug {i.name}</color>");
                    if (i.Size() != dSize)
                        throw new InvalidOperationException("Multiple Debug textures have different sizes");
                    //PixelsMultipleBatches.Add(i.GetPixels());
                }
            }
            else // Compute the screen difference
            {
                var differPixel = new UnityEngine.Color[Size.x * Size.y];
                var whitePixel = new UnityEngine.Color[Size.x * Size.y];
                Parallel.For(0, Size.x, x =>
                {
                    for (int y = 0; y < Size.y; y++)
                    {
                        var i = y * Size.x + x;
                        var d = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                        differPixel[i] = new UnityEngine.Color(d.x, d.y, d.z);
                        whitePixel[i] = new UnityEngine.Color(ScreenWhiteTexture[i].x, ScreenWhiteTexture[i].y, ScreenWhiteTexture[i].z);
                    }
                });
                PixelsMultipleBatches.Add(differPixel); // difference image
                PixelsMultipleBatches.Add(whitePixel); // original (white) image
                batchCount = PixelsMultipleBatches.Count;
            }
            int conSize = (int)Math.Ceiling(0.007f * Size.y) * 2 + 1;
            conSize = Math.Max(conSize, 7); // enforce a minimum of 7
            float minLength = conSize * 7.7f;
            minLength = locateIndex == -1 ? minLength : minLength * areaPercent; // minLength has to be scaled down by areaPercent
            string log = $"[Log][ScreenLocate Auto] Size: ({Size.x},{Size.y}), convolution kernel size: {conSize}, minimum segment length: {minLength}";
            var allLines = new List<LineIdentified>();
            List<Texture2D> LocateTexTemp = new List<Texture2D>();
            List<Matrix> ScreenLocateMatList = new List<Matrix>();
            for (int batch = 0; batch < batchCount; batch++)
            {
                Texture2D locateTex;
                if (debugImages != null && debugImages.Count != 0)
                    locateTex = debugImages[batch];
                else
                    locateTex = ToLocateTex(PixelsMultipleBatches[batch]);
                LocateTexTemp.Add(locateTex);
                var ScreenLocateMat = locateTex.Too0Mat(); // Matrix used to extract the lines
                var lineCount = ZIMIdentifyQuadLSD(
                    ref allLines,
                    batch,
                    ScreenLocateMat.zimIdentifyEdgeGradientAny(conSize),
                    minLength,
                    new Vector(minLength * 0.4f, conSize * 1.6f));
                log += $"\r\nImage {batch}: identified {lineCount} line segments";
                ScreenLocateMatList.Add(ScreenLocateMat);
            }
            Texture2D ScreenLocateTexture = LocateTexTemp[0]; // difference image, for output
            // The matrix produced by the LSD step is smaller (because of the convolution), so the lines must be offset here.
            // Also new: drop segments whose gradient falls below a threshold.
            float minGradient = 0.08f;
            var offset = new Vector((conSize - 1) / 2, (conSize - 1) / 2);
            var tempList = new List<LineIdentified>();
            for (int i = 0; i < allLines.Count; i++)
            {
                var l = allLines[i];
                if (l.Gradient > minGradient * l.Line.Length)
                {
                    l.Offset(offset);
                    tempList.Add(l);
                }
            }
            allLines = tempList;
            log += $"\r\nAfter gradient-threshold filtering, final segment count: {allLines.Count}";
            // If manual data exists, refresh its Size
            QuadManual?.ReSize(new Vector(Size.x, Size.y), ScreenMap.ViewAspectRatioSetting);
            // Estimate the screen center point: if manual locating data exists, just average it; otherwise compute it
            // from the color difference. ScreenLocateMatList[0] is the screen's black/white difference by default.
            Vector AvgPoint = QuadManual != null ? QuadManual.Quad.Centroid : GetAvgPoint(ScreenLocateMatList[0]);
            // Filter down to the four edges of the quadrilateral
            var (quadLinesSemiAuto, quadLinesAuto) = FilterLines(
                ScreenLocateMatList,
                allLines,
                AvgPoint,
                out LineIdentified[] manualLines,
                out List<LineIdentified> possibleLines,
                conSize,
                minLength);
            #region Fully automatic result
            List<LineIdentified> LineIdentifiedAuto = new List<LineIdentified>(); // edge order: bottom, right, top, left
            for (int i = 0; i < 4; i++)
            {
                if (quadLinesAuto[i] != null)
                    LineIdentifiedAuto.Add(quadLinesAuto[i]);
            }
            if (LineIdentifiedAuto.Count == 4) // Check whether the identified segments can form a screen; record it if they can
            {
                var a = LineIdentifiedAuto[0].Line.Intersect(LineIdentifiedAuto[3].Line, false).Value;
                var b = LineIdentifiedAuto[0].Line.Intersect(LineIdentifiedAuto[1].Line, false).Value;
                var c = LineIdentifiedAuto[2].Line.Intersect(LineIdentifiedAuto[3].Line, false).Value;
                var d = LineIdentifiedAuto[1].Line.Intersect(LineIdentifiedAuto[2].Line, false).Value;
                QuadAuto = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
                if (!QuadAuto.IsQuadComplete())
                    QuadAuto = null;
            }
            #endregion
            #region Semi-automatic identification
            List<LineIdentified> LineIdentifiedSemiAuto = new List<LineIdentified>(); // edge order: bottom, right, top, left
            LastQuadSemiAutoState = new bool[4] { true, true, true, true };
            for (int i = 0; i < 4; i++)
            {
                if (quadLinesSemiAuto[i] != null)
                    LineIdentifiedSemiAuto.Add(quadLinesSemiAuto[i]);
                else if (manualLines != null)
                {
                    LineIdentifiedSemiAuto.Add(manualLines[i]);
                    LastQuadSemiAutoState[i] = false;
                }
            }
            if (LineIdentifiedSemiAuto.Count == 4) // Check whether the identified segments can form a screen; record it if they can
            {
                var a = LineIdentifiedSemiAuto[0].Line.Intersect(LineIdentifiedSemiAuto[3].Line, false).Value;
                var b = LineIdentifiedSemiAuto[0].Line.Intersect(LineIdentifiedSemiAuto[1].Line, false).Value;
                var c = LineIdentifiedSemiAuto[2].Line.Intersect(LineIdentifiedSemiAuto[3].Line, false).Value;
                var d = LineIdentifiedSemiAuto[1].Line.Intersect(LineIdentifiedSemiAuto[2].Line, false).Value;
                QuadSemiAuto = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
                if (!QuadSemiAuto.IsQuadComplete())
                    QuadSemiAuto = null;
            }
            #endregion
            // Prefer the fully automatic result (it can also be set manually from outside)
            if (QuadSemiAuto == null && QuadAuto == null && Screen.QuadInCamera != null) // Fall back to the previous screen if possible
            {
                Debug.Log($"<color=yellow>[ScreenIdentification] This identification failed; falling back to the previous result: {Screen.QuadInCamera}</color>");
                quadTemp.Add(Screen.QuadInCamera);
            }
            else if (QuadAuto != null)
            {
                Debug.Log($"<color=aqua>[ScreenIdentification] Quadrilateral identified (fully automatic): {QuadAuto}</color>");
                quadTemp.Add(QuadAuto);
            }
            else if (QuadSemiAuto != null)
            {
                Debug.Log($"<color=aqua>[ScreenIdentification] Quadrilateral identified (semi-automatic): {QuadSemiAuto}</color>");
                quadTemp.Add(QuadSemiAuto);
            }
            #region Draw the output textures
            // Draw the semi-automatic result
            var ScreenQuadMap = new Matrix(Size, Tiling: true); // The identified screen quadrilateral (semi-automatic and automatic on the same image)
            foreach (var i in LineIdentifiedSemiAuto.Index())
            {
                if (LastQuadSemiAutoState[i])
                    o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 5, new Geometry2D.Float.Vector(0, 10));
                else
                    o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 3, new Geometry2D.Float.Vector(0, 6), true);
            }
            // Draw the fully automatic result
            foreach (var i in LineIdentifiedAuto.Index())
                o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedAuto[i].DrawLine, (x, y) => 4, new Geometry2D.Float.Vector(0, 4), true);
            Texture2D ScreenQuad = ScreenQuadMap.ToTexRGBA(FloatValueToColor);
            Texture2D ScreenQuadWithScreen = ScreenQuad.Overlay(ScreenLocateTexture); // Overlay the screen difference image
            // Draw allLines
            var allLinesMap = new Matrix(Size, Tiling: true);
            foreach (var l in allLines)
            {
                if (l.DrawLine != null)
                    o0Extension.DrawLine(allLinesMap, l.DrawLine, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true);
            }
            var allLinesTex = allLinesMap.ToTexRGBA(FloatValueToColor);
            ScreenLocate.DebugTexture(1, allLinesTex);
            // Also output a result image that includes the distractor segments
            var ChoosableLineMap = new Matrix(Size, Tiling: true);
            foreach (var l in possibleLines)
            {
                if (l != null && !quadLinesSemiAuto.Contains(l) && !manualLines.Contains(l))
                    o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true); // other candidate segments
            }
            foreach (var l in LineIdentifiedSemiAuto)
            {
                if (l != null)
                    o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 5, new Geometry2D.Float.Vector(0, 5)); // the identified result
            }
            if (manualLines != null)
            {
                foreach (var l in manualLines)
                    o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true); // old screen segments (e.g., from the last manual identification)
            }
            Texture2D ChoosableLineTex = ChoosableLineMap.ToTexRGBA(FloatValueToColor);
            #endregion
            log += $"\r\nScreen quad, manual: {QuadManual != null}\r\nScreen quad, semi-automatic: {QuadSemiAuto != null}\r\nScreen quad, fully automatic: {QuadAuto != null}";
            Debug.Log(log);
            // Whether to save the images locally
            if ((ScreenLocate.Main.SaveToggle?.isOn ?? false) && ScreenLocate.Main.DebugOnZIMDemo)
            {
                var FileDirectory = $"Debug_屏幕定位/";
                SaveImages(FileDirectory, log, ScreenLocateTexture, allLinesTex, ChoosableLineTex, ScreenQuad);
            }
            //times.Add(watch.ElapsedMilliseconds);
            //Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
            // OpenCV processing, zim
            {
                //var cvLines = edge.cvHoughLinesP();
                //ScreenLocate.DebugTexture(5, cvLines);
                //var myLines = Hough.Transform(edgeMat);
                //var cvLines = edge.cvLine(myLines);
                //ScreenLocate.DebugTexture(5, cvLines);
            }
            {
                ScreenLocate.DebugTexture(2, ScreenLocateTexture);
                ScreenLocate.DebugTexture(3, ScreenQuad);
                ScreenLocate.DebugTexture(4, ScreenQuadWithScreen);
                ScreenLocate.DebugTexture(5, ChoosableLineTex);
            }
            foreach (var i in LocateTexTemp)
            {
                if (i != ScreenLocateTexture) // ScreenLocateTexture is released by ScreenLocate.DebugTexture
                    GameObject.Destroy(i);
            }
        }
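        // Pipeline summary for QuadrilateralFit (a restatement of the steps above, no new behavior): build the
        // black/white difference image plus the raw white image, auto-light them, run the edge-gradient + LSD line
        // detector per batch, drop low-gradient segments and shift the rest back by the convolution offset, estimate
        // the screen center, then let FilterLines assemble a semi-automatic quad (guided by QuadManual when present)
        // and a fully automatic quad, draw the debug textures, and push the preferred quad into quadTemp.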
        Vector GetAvgPoint(Matrix screenLocateMat)
        {
            // Weighted average
            Vector[] avgPointsColumn = new Vector[screenLocateMat.Size.x];
            float[] valueSumsColumn = new float[screenLocateMat.Size.x];
            Parallel.For(0, screenLocateMat.Size.x, i =>
            {
                for (int j = 0; j < screenLocateMat.Size.y; j++)
                {
                    var value = screenLocateMat[i, j];
                    valueSumsColumn[i] += value;
                    avgPointsColumn[i] += new Vector(i, j) * value;
                }
            });
            Vector avgPoint = Vector.Zero;
            var valueSum = 0f;
            for (int i = 0; i < screenLocateMat.Size.x; i++)
            {
                avgPoint += avgPointsColumn[i];
                valueSum += valueSumsColumn[i];
            }
            avgPoint /= valueSum;
            return avgPoint;
        }
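        // In other words, GetAvgPoint returns the intensity-weighted centroid of the matrix:
        // avg = sum(p * v(p)) / sum(v(p)) over all pixels p with value v(p). On the black/white difference image
        // this lands near the middle of the lit screen, which is why it serves as the screen-center reference.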
        // Returns the number of segments found; 0 means the search failed
        int ZIMIdentifyQuadLSD(ref List<LineIdentified> allLines, int batch, (Matrix edgeMat, Matrix edgeDirMat) edgeGradient,
            float minLength, Vector LineCaptureSize)
        {
            var l = edgeGradient.edgeMat.IdentifyLineLSD(edgeGradient.edgeDirMat, minLength, 25, LineCaptureSize);
            if (l == null || l.Count == 0)
                return 0;
            allLines.AddRange(l.Select((i) => new LineIdentified(batch, i)));
            return l.Count;
        }
        // Returns the four edges of the quadrilateral (semi-automatic, fully automatic). Each list always has
        // length 4 (an entry is null if that edge was not identified) and the edge order is: bottom, right, top, left.
        (List<LineIdentified>, List<LineIdentified>) FilterLines(List<Matrix> screenLocateMatList, List<LineIdentified> allLines, Vector avgPoint,
            out LineIdentified[] manualLines, out List<LineIdentified> possibleLines, float gradientLength, float minLength = 100)
        {
            // Drop segments outside the elliptical frame (a segment is dropped once more than half of it lies outside)
            var innerLines = new List<LineIdentified>();
            for (int i = 0; i < allLines.Count; i++)
            {
                List<Vector> InArea = new List<Vector>();
                var dir = (allLines[i].Line.B - allLines[i].Line.A) / 4;
                var points = new Vector[5] { allLines[i].Line.A, allLines[i].Line.A + dir, allLines[i].Line.A + dir * 2f, allLines[i].Line.A + dir * 3f, allLines[i].Line.B }; // point A, intermediate points, point B
                for (int pI = 0; pI < points.Length; pI++)
                {
                    if (!ScreenLocate.Main.ScreenPixelCheaker.OutArea2D(points[pI], Size))
                        InArea.Add(points[pI]);
                }
                if (InArea.Count < 2) // fewer than 2 points are inside
                    continue;
                else if (InArea.Count < points.Length) // not entirely inside
                    allLines[i].DrawLine = new Line(InArea.First(), InArea.Last()); // use the inner part as the DrawLine for the drawing step later
                else // the whole segment lies inside the ellipse
                    allLines[i].DrawLine = allLines[i].Line;
                innerLines.Add(allLines[i]);
            }
            // Angle threshold used to decide whether a segment's gradient direction points toward the screen center (avgPoint)
            var avaAngleHalf = 75f;
            // Evaluates lines that sit inside the screen
            var interLineGuess = new InterLineGuess(screenLocateMatList, gradientLength * 2, minLength);
            #region Local functions
            float ScreenGrad(LineIdentified line)
            {
                var dir = (line.Line.B - line.Line.A).Normalized;
                var vertical = new Vector(-dir.y, dir.x) * (gradientLength / 2);
                int step = (int)(minLength / 5);
                var ll = line.Line.Length;
                var lg = new List<float>();
                for (int i = 0; i <= ll; i += step)
                {
                    var point = line.Line.A + dir * i;
                    var ga = point + vertical;
                    var gb = point - vertical;
                    lg.Add(screenLocateMatList[line.Batch][(int)ga.x, (int)ga.y] - screenLocateMatList[line.Batch][(int)gb.x, (int)gb.y]);
                }
                return Math.Abs(lg.Mean());
            }
            // Compute a combined gradient along the line (gradient scaled by a length factor, with a distance term added);
            // distanceRatio is the actual distance divided by the distance threshold.
            float estimateGradient(LineIdentified line, float distanceRatio)
            {
                var gM = ScreenGrad(line);
                if (line.Batch > 0) // images from other batches get a smaller gradient weight
                    gM /= 3;
                float e = (float)Math.Sqrt(Math.Ceiling(line.Line.Length / minLength)); // length factor: larger gradient and greater length win during filtering
                float d = (10f - distanceRatio * distanceRatio) / 10f; // distance factor: the closer the segment, the larger the factor
                line.ZIMGradient = e * gM + d; // store the combined gradient; the new black-border detection reuses it
                return line.ZIMGradient;
            }
            // Use the angle of the segment's gradient to decide whether it is a screen edge; index says which edge (order: bottom, right, top, left)
            void GetScreenLineIndex(LineIdentified line)
            {
                var a = (avgPoint - (line.Line.A + line.Line.B) / 2).DegreeToXAxis();
                //Debug.Log(a + ", " + gradient + ", " + sum);
                var index = -1;
                if (Math.Abs(a - line.GradientDegree) < avaAngleHalf || Math.Abs(a - 360 - line.GradientDegree) < avaAngleHalf || Math.Abs(a + 360 - line.GradientDegree) < avaAngleHalf)
                {
                    if (line.GradientDegree > 45 && line.GradientDegree < 135) // bottom
                        index = 0;
                    else if (line.GradientDegree > 135 && line.GradientDegree < 225) // right
                        index = 1;
                    else if (line.GradientDegree > 225 && line.GradientDegree < 315) // top
                        index = 2;
                    else
                        index = 3;
                }
                line.ScreenLineIndex = index;
            }
            #endregion
            // Decide which edge each segment belongs to from its gradient direction
            foreach (var l in innerLines)
                GetScreenLineIndex(l);
            // bottom, right, top, left; semi-automatic and automatic
            var quadLinesSemiAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
            var quadLinesAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
            possibleLines = new List<LineIdentified>();
            #region Semi-automatic (using manual data)
            // If manual locating data already exists, use it to filter the segments (semi-automatic)
            manualLines = null;
            if (QuadManual != null)
            {
                Debug.Log($"[IdentifyLineLSD] Filtering against existing locating data, QuadManual: {QuadManual}");
                manualLines = QuadManual.GetLines().Select((i) => new LineIdentified(0, i, 0, 0, true)).ToArray();
                var calibration = ScreenLocate.Main.ReDoLocateCalibrationRatio * Size.y;
                var distanceMeasure = 0.02f * Size.y;
                var avgPointCross = manualLines.Select((i) => i.Line.LineCrossWithPoint(avgPoint)).ToArray(); // cross value relative to the average point
                var avgPointPedal = manualLines.Select((i) => o0Extension.PointPedal(i.Line, avgPoint, out _)).ToArray(); // feet of the perpendiculars for the current location: bottom, right, top, left
                foreach (var line in innerLines)
                {
                    // Filter conditions: 1) the gradient direction matches, 2) the foot of the perpendicular is close enough,
                    // 3) both endpoints A and B lie on the outer side of the old segment, 4) the foot of the perpendicular
                    // from the new segment's midpoint onto the old segment falls within the old segment.
                    if (line.ScreenLineIndex >= 0)
                    {
                        var distanceToOld = (o0Extension.PointPedal(line.Line, avgPoint, out _) - avgPointPedal[line.ScreenLineIndex]).Length;
                        if (distanceToOld < calibration &&
                            manualLines[line.ScreenLineIndex].Line.LineCrossWithPoint(line.Line.A) * avgPointCross[line.ScreenLineIndex] <= 0 &&
                            manualLines[line.ScreenLineIndex].Line.LineCrossWithPoint(line.Line.B) * avgPointCross[line.ScreenLineIndex] <= 0)
                        {
                            var middleToOldLine = o0Extension.PointPedal(manualLines[line.ScreenLineIndex].Line, (line.Line.A + line.Line.B) / 2, out bool inLineSegment);
                            if (inLineSegment)
                            {
                                quadLinesSemiAuto[line.ScreenLineIndex].Add((estimateGradient(line, (float)Math.Floor(distanceToOld / distanceMeasure)), line));
                                possibleLines.Add(line);
                            }
                        }
                    }
                }
            }
            // Collect the result
            var resultSemiAuto = new LineIdentified[4];
            var resultSemiAutoPedal = new Vector[4]; // used to find parallel lines
            for (int i = 0; i < 4; i++)
            {
                resultSemiAuto[i] = quadLinesSemiAuto[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
                if (resultSemiAuto[i] != null)
                    resultSemiAutoPedal[i] = o0Extension.PointPedal(resultSemiAuto[i].Line, avgPoint, out _);
            }
            // New feature (handles black borders): look for lines parallel to the result and decide whether to replace
            // an edge (1) the line lies inside the result and is closest to the center, 2) it is nearly parallel).
            UpdateResultlines(resultSemiAuto, FindInterLinePair(
                interLineGuess,
                GetInterSelectableLines(quadLinesSemiAuto, resultSemiAuto, resultSemiAutoPedal, avgPoint)));
            #endregion
            #region Fully automatic
            // Fully automatic
            foreach (var line in innerLines)
            {
                if (line.ScreenLineIndex >= 0 && line.Batch < 1) // fully automatic only uses the first image, the difference image by default
                {
                    quadLinesAuto[line.ScreenLineIndex].Add((estimateGradient(line, 1), line));
                }
            }
            // Collect the result
            var resultAuto = new LineIdentified[4];
            var resultAutoPedal = new Vector[4]; // used to find parallel lines
            for (int i = 0; i < 4; i++)
            {
                resultAuto[i] = quadLinesAuto[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
                if (resultAuto[i] != null)
                    resultAutoPedal[i] = o0Extension.PointPedal(resultAuto[i].Line, avgPoint, out _);
            }
            // New feature (handles black borders): look for lines parallel to resultAuto and decide whether to replace
            // an edge (1) the line lies inside the result and is closest to the center, 2) it is nearly parallel,
            // 3) LineGuess confirms it is a straight edge).
            UpdateResultlines(resultAuto, FindInterLinePair(
                interLineGuess,
                GetInterSelectableLines(quadLinesAuto, resultAuto, resultAutoPedal, avgPoint)));
            #endregion
            return (resultSemiAuto.ToList(), resultAuto.ToList());
        }
        List<LineIdentified> GetInterSelectableLines(List<(float, LineIdentified)>[] quadLines, LineIdentified[] resultLines, Vector[] resultPedal, Vector avgPoint)
        {
            var result = new List<LineIdentified>();
            foreach (var ql in quadLines)
            {
                foreach (var (_, line) in ql)
                {
                    if (line != resultLines[line.ScreenLineIndex] && line.Batch < 1) // only batch 0 is checked for inner lines caused by black borders
                    {
                        var pedal = o0Extension.PointPedal(line.Line, avgPoint, out _);
                        var a0 = pedal - avgPoint;
                        var a0L = a0.Length;
                        line.DistanceToMiddle = a0L;
                        var a1 = resultPedal[line.ScreenLineIndex] - avgPoint;
                        var a1L = a1.Length;
                        if (a0L < a1L)
                        {
                            var dotN = a0.Dot(a1) / a0L / a1L;
                            if (Math.Abs(dotN - 1) < 0.002) // nearly parallel is enough
                                result.Add(line);
                        }
                    }
                }
            }
            return result;
        }
        void UpdateResultlines(LineIdentified[] result, (LineIdentified a, LineIdentified b) inter)
        {
            if (inter.a != null) // Replace some edges of the filtering result from the previous step to get the final result
                result[inter.a.ScreenLineIndex] = inter.a;
            if (inter.b != null)
                result[inter.b.ScreenLineIndex] = inter.b;
        }
        (LineIdentified a, LineIdentified b) FindInterLinePair(InterLineGuess lineGuess, List<LineIdentified> interSelectable, int maxCountToSelect = 8)
        {
            Debug.Log("[ScreenIdentification] selectable inter line count: " + interSelectable.Count);
            interSelectable.Sort((a, b) => b.ZIMGradient.CompareTo(a.ZIMGradient));
            int count = 0;
            LineIdentified[] selected = new LineIdentified[4];
            foreach (var line in interSelectable)
            {
                if (line.GuessIsInterLine(lineGuess)) // Decide whether it really is a line, and keep the one closest to the center
                {
                    if (ScreenLocate.Main.DebugOnZIMDemo)
                        Debug.Log($"[ScreenIdentification] {interSelectable.IndexOf(line)}, guess is line: (index)" + line.ScreenLineIndex);
                    if (selected[line.ScreenLineIndex] == null || selected[line.ScreenLineIndex].DistanceToMiddle > line.DistanceToMiddle)
                        selected[line.ScreenLineIndex] = line;
                }
                if (count++ >= maxCountToSelect)
                    break;
            }
            var selectedList = new List<LineIdentified>();
            foreach (var i in selected)
            {
                if (i != null)
                    selectedList.Add(i);
            }
            if (selectedList.Count == 4)
            {
                if (selected[0].ZIMGradient + selected[2].ZIMGradient > selected[1].ZIMGradient + selected[3].ZIMGradient)
                    return (selected[0], selected[2]);
                else
                    return (selected[1], selected[3]);
            }
            else if (selected[0] != null && selected[2] != null)
                return (selected[0], selected[2]);
            else if (selected[1] != null && selected[3] != null)
                return (selected[1], selected[3]);
            else if (selectedList.Count == 2)
                return selectedList[0].ZIMGradient > selectedList[1].ZIMGradient ? (selectedList[0], null) : (selectedList[1], null);
            else if (selectedList.Count == 1)
                return (selectedList[0], null);
            else
                return (null, null);
        }
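        // Selection note for FindInterLinePair (describing the branches above): at most one replacement is returned
        // per axis. When candidates exist for all four edges, the opposite pair (bottom+top or left+right) with the
        // larger combined ZIMGradient wins; otherwise a complete opposite pair is preferred, and a single remaining
        // candidate is returned on its own.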
        void SaveImages(string FileDirectory, string log,
            Texture2D ScreenLocateTex, Texture2D allLinesTex, Texture2D ChoosableLineTex, Texture2D ScreenQuadTex)
        {
            if (!Directory.Exists(FileDirectory))
                Directory.CreateDirectory(FileDirectory);
            var time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
            var pngData = ScreenLocate.Main.OutputTextures[7]?.EncodeToPNG();
            if (pngData != null)
                File.WriteAllBytes($"{FileDirectory}{time}A屏幕原图.png", pngData);
            var pngData1 = ScreenLocateTex?.EncodeToPNG();
            if (pngData1 != null)
                File.WriteAllBytes($"{FileDirectory}{time}B黑白色差.png", pngData1);
            var pngData2 = allLinesTex?.EncodeToPNG();
            if (pngData2 != null)
                File.WriteAllBytes($"{FileDirectory}{time}C全部识别线段_半自动.png", pngData2);
            var pngData3 = ChoosableLineTex?.EncodeToPNG();
            if (pngData3 != null)
                File.WriteAllBytes($"{FileDirectory}{time}D备选线段_半自动.png", pngData3);
            var pngData4 = ScreenQuadTex?.EncodeToPNG();
            if (pngData4 != null)
                File.WriteAllBytes($"{FileDirectory}{time}E识别结果.png", pngData4);
            Debug.Log($"<color=aqua>({time}) Screen identification images saved to: program root/{FileDirectory}</color>");
            log +=
                $"\r\nOriginal screen image saved: {pngData != null}, " +
                $"\r\nBlack/white difference saved: {pngData1 != null}, " +
                $"\r\nAll identified segments (semi-automatic) saved: {pngData2 != null}, " +
                $"\r\nCandidate segments (semi-automatic) saved: {pngData3 != null}, " +
                $"\r\nIdentification result saved: {pngData4 != null}";
            File.WriteAllText($"{FileDirectory}{time}屏幕自动定位_日志.log", log);
        }
    }
}