ScreenIdentification.cs

#define ENABLE_LOG
using o0.Geometry2D.Float;
using o0.Num;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using UnityEngine;
using UnityEngine.UIElements;
using ZIM;
using ZIM.Unity;
namespace o0.Project
{
    public partial class ScreenIdentification
    {
        private const string TAG = "ScreenIdentification#";
        // LocateAreaData describes the screen regions whose colour difference is sampled on each pass; there can be several passes.
        // Setting LocateSingleStep switches to identifying the colour difference only once.
        static Rect[][] LocateAreaData = new Rect[][] {
            new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f) },
            new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f) }
        };
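        // Each row of LocateAreaData is anchored to one corner of the screen: within a row the rects grow from 30% to 50%
        // of each screen dimension while keeping that corner fixed. After the quadrant probe selects a corner, these growing
        // regions are lit in turn so that several quads of different sizes can be fitted and extrapolated to the full screen
        // (see the linear fit in CaptureEnd).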
        //static Rect[][] LocateAreaData = new Rect[][] {
        //    new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0.4f, 0f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0f, 0.4f, 0.6f, 0.6f) },
        //    new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f), new Rect(0.4f, 0.4f, 0.6f, 0.6f) }
        //};
        //static bool LocateSingleStep = false;
        static bool LocateSingleStep = true;
        public Geometry2D.Vector<int> Size => ScreenLocate.Main.CameraSize;
        public QuadrilateralInCamera QuadManual;
        public QuadrilateralInCamera QuadAuto; // Fully automatic result; can be offered to the user (takes effect once assigned to Screen.QuadInCamera)
        public QuadrilateralInCamera QuadSemiAuto; // Semi-automatic result; can be offered to the user (takes effect once assigned to Screen.QuadInCamera)
        public ScreenMap Screen; // The identified screen, used for the perspective transform
        int capture = 0;
        int delay = 0;
        int maxCapture;
        int maxDelay;
        Geometry.Vector<float>[] ScreenBlackTexture;
        Geometry.Vector<float>[] ScreenWhiteTexture;
        int locateIndex = -1;
        readonly List<Rect> locateArea = new List<Rect> {
            new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0.5f, 0.5f, 0.5f, 0.5f)
        }; // Regions of the screen that are lit white
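        // locateIndex drives the capture state machine: -1 means the whole screen is black (baseline capture), 0..3 light the
        // four quadrant rects above to find the corner with the largest brightness change, and indices >= 4 light the growing
        // rects appended from LocateAreaData for the selected corner. areaPercent is then the width fraction of the region
        // currently lit.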
        float areaPercent => locateArea[locateIndex].size.x; // Fraction of the screen covered by the current white region
        int areaSelected = -1; // Which region was selected; the order matches the Quadrilateral corners
        readonly List<float> sumTemp = new List<float>();
        readonly List<QuadrilateralInCamera> quadTemp = new List<QuadrilateralInCamera>();
        //public ScreenIdentification(WebCamTexture texture)
        //{
        //    Size = new Geometry2D.Vector<int>(texture.width, texture.height);
        //    Screen = new ScreenMap();
        //}
        public static UnityEngine.Color FloatValueToColor(float i)
        {
            return i switch
            {
                1 => UnityEngine.Color.green,
                2 => UnityEngine.Color.red,
                3 => UnityEngine.Color.yellow,
                4 => UnityEngine.Color.white,
                _ => UnityEngine.Color.black,
            };
        }
        public ScreenIdentification()
        {
            Screen = new ScreenMap();
            OnLocateScreenEnter += () => Application.targetFrameRate = 30; // Lock the frame rate during identification so the camera captures the intended frames
            OnLocateScreenEnd += () => Application.targetFrameRate = 60;
        }
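        // Minimal driving sketch (assumption: some controller owns the camera feed and pumps frames in; the calls below only
        // mirror members of this class):
        //
        //   var identification = new ScreenIdentification();
        //   identification.LocateScreen(Capture: 30, Delay: 30);     // blacks out the screen and arms the state machine
        //   // then, once per rendered frame, with the latest camera Texture2D:
        //   bool stillLocating = identification.Update(camTexture);  // false once identification has finished (or never started)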
        public void SetScreenQuad(QuadrilateralInCamera quad) => Screen.QuadInCamera = quad;
        // Result of the last semi-automatic pass; false means that edge failed to be identified. Edge order: bottom, right, top, left
        public bool[] LastQuadSemiAutoState;
        public event Action OnLocateScreenEnter;
        public event Action OnLocateScreenEnd;
        public bool bStartLocateScreen { get; set; } = false; // Whether capture is in progress
        public bool SelectScreenAfterLocate(ScreenLocate.ScreenIdentificationTag tag)
        {
            QuadrilateralInCamera target = tag switch
            {
                ScreenLocate.ScreenIdentificationTag.Manual => QuadManual,
                ScreenLocate.ScreenIdentificationTag.SemiAuto => QuadSemiAuto,
                ScreenLocate.ScreenIdentificationTag.Auto => QuadAuto,
                _ => null
            };
            if (target == null)
                return false;
            Debug.Log($"<color=aqua>[ScreenIdentification] Selected an already identified screen ({Enum.GetName(typeof(ScreenLocate.ScreenIdentificationTag), tag)}), {target}</color>");
            SetScreenQuad(target);
            return true;
        }
        // Entry point that starts automatic identification
        public void LocateScreen(int Capture = 30, int Delay = 30) // values are in frames
        {
            if (ScreenLocate.Main.DebugScreenImages.Count != 0 && ScreenLocate.Main.DebugOnZIMDemo) // This branch is only for testing with static images
            {
                ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(ScreenLocate.Main.DebugScreenImages[0].width, ScreenLocate.Main.DebugScreenImages[0].height);
                DebugImage(ScreenLocate.Main.DebugScreenImages);
                Screen.QuadInCamera = quadTemp[0];
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                delay = 0;
                capture = 0;
                ScreenWhiteTexture = null;
                ScreenBlackTexture = null;
                locateIndex = -1;
                areaSelected = -1;
                quadTemp.Clear();
                sumTemp.Clear();
                ScreenLocate.Main.DebugScreenImages.Clear();
                return;
            }
            delay = Math.Max(Delay, 5);
            capture = Math.Max(Capture, 5);
            maxDelay = Delay;
            maxCapture = Capture;
            ScreenLocate.SetScreen(new Rect(0f, 0f, 1f, 1f), UnityEngine.Color.black);
            //ScreenLocate.SetScreen(new Rect(0f, 0f, 0.6f, 0.6f), UnityEngine.Color.white);
            //bStartLocateScreen = false;
            OnLocateScreenEnter?.Invoke();
        }
        /// <summary>
        /// Whether capture has been started,
        /// i.e. the two counters capture and delay have been initialised.
        /// </summary>
        /// <returns></returns>
        public bool isInitLocateScreen()
        {
            return capture != 0 && delay != 0;
        }
        void DebugImage(List<Texture2D> images)
        {
            QuadrilateralFit(images);
            //var watch = new System.Diagnostics.Stopwatch();
            //watch.Start();
            //var times = new List<double>() { 0.0 };
#if (!NDEBUG && DEBUG && ENABLE_LOG)
            Console.WriteLine($"{TAG} quadTemp.Count:{quadTemp.Count}");
#endif
            if (quadTemp.Count > 0)
            {
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.outputRawImages[4].transform.GetChild(0) as RectTransform, quadTemp[0]);
                // Perspective transform
                // var srcWidth = LocateLightedRedTex.width;
                // var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
                // var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
                // var transformTex = new Texture2D(transformWidth, transformHeight);
                // var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
                // var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
                // var srcPixel = LocateLightedRedTex.GetPixels();
                // Parallel.For(0, transformWidth, (x) =>
                // {
                //     for (int y = 0; y < transformHeight; y++)
                //     {
                //         var index = y * transformWidth + x;
                //         var sampleCoord = pt.TransformRound(x, y);
                //         dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
                //     }
                // });
                // transformTex.SetPixels(dstPixel);
                // transformTex.Apply();
                // //ScreenLocate.DebugTexture(1, transformTex);
                //#if (!NDEBUG && DEBUG && ENABLE_LOG)
                //    Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
                //#endif
            }
            //times.Add(watch.ElapsedMilliseconds);
            //Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
        }
        public void NextScreen()
        {
            // Identify only a single colour-difference change
            if (LocateSingleStep && areaSelected == -1)
            {
                LocateAreaData = new Rect[][] { new Rect[] { new Rect(0, 0, 1f, 1f) } };
                locateIndex = 3;
                areaSelected = 0;
                locateArea.AddRange(LocateAreaData[0]);
            }
            // index starts from -1
            locateIndex++;
            if (locateIndex < locateArea.Count) // Light up the screen regions one after another
            {
                ScreenLocate.SetScreen(locateArea[locateIndex], UnityEngine.Color.white);
                delay = maxDelay;
                capture = maxCapture;
            }
            else // Stop driving the screen black/white
            {
                ScreenLocate.SetScreen(null);
                ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
                Reset();
            }
        }
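        // When LocateSingleStep is true, NextScreen skips the quadrant probe entirely: LocateAreaData collapses to a single
        // full-screen rect, locateIndex jumps to 3 and areaSelected to 0, so exactly one white/black colour-difference pass is
        // captured before the quadrilateral fit runs.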
        // Clear the recorded screen-identification data (manual, automatic, etc.)
        public void ClearQuadCache()
        {
            SetScreenQuad(null);
            QuadManual = null;
            QuadSemiAuto = null;
            QuadAuto = null;
        }
        public void Reset()
        {
            // bStartLocateScreen = false;
            delay = 0;
            capture = 0;
            ScreenWhiteTexture = null;
            ScreenBlackTexture = null;
            locateIndex = -1;
            areaSelected = -1;
            if (locateArea.Count > 4)
                locateArea.RemoveRange(4, LocateAreaData[0].Length);
            quadTemp.Clear();
            sumTemp.Clear();
        }
        public void CaptureBlack(Texture2D cam)
        {
            if (ScreenBlackTexture == null)
                ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }
        public void CaptureWhite(Texture2D cam)
        {
            if (ScreenWhiteTexture == null)
                ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
            var pixel = cam.GetPixels();
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var ip = pixel[i];
                ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
            });
        }
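        // CaptureBlack/CaptureWhite accumulate a running mean: every captured frame adds pixel / maxCapture, so after
        // maxCapture frames each array holds the per-pixel average of the black or white exposure,
        // i.e. mean[i] = (c_1[i] + ... + c_maxCapture[i]) / maxCapture.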
        public void CaptureStay(Texture2D cam)
        {
            if (locateIndex == -1) // Screen is black
            {
                CaptureBlack(cam);
            }
            else // Part of the screen is white
            {
                CaptureWhite(cam);
            }
        }
        public void CaptureEnd()
        {
            //Debug.Log("locateIndex: " + locateIndex + ", quad: " + quadTemp.Count);
            if (locateIndex == -1)
                return;
            if (locateIndex < 4)
            {
                sumTemp.Add(GetBrightness());
                ScreenWhiteTexture = null;
                // Pick the region with the largest brightness difference
                if (locateIndex == 3)
                {
                    areaSelected = sumTemp.MaxIndex();
                    locateArea.AddRange(LocateAreaData[areaSelected]);
                }
            }
            else if (locateIndex >= 4 && locateIndex < locateArea.Count - 1)
            {
                QuadrilateralFit();
                ScreenWhiteTexture = null;
            }
            else
            {
                QuadrilateralFit();
                if (quadTemp.Count != LocateAreaData[0].Length)
                {
                    Debug.Log($"<color=yellow>[ScreenIdentification] Quadrilateral fit failed, quadTemp.Count: {quadTemp.Count}</color>");
                }
                else if (quadTemp.Count == 1)
                {
                    SetScreenQuad(quadTemp[0]);
                    Debug.Log($"[ScreenIdentification] Fit succeeded, identified data: {Screen.QuadInCamera}");
                }
                else
                {
                    // Debug.Log($"Quadrilateral fit 2, quadTemp.Count: {quadTemp.Count}");
                    // Linear fit
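                    // Rough idea of this branch (several growing regions were captured): as the lit rect grows, the three
                    // vertices that are not the anchored corner move roughly linearly with the region's width fraction x.
                    // For each of those vertices a 1-D regression of position against x is fitted (x values taken from
                    // LocateAreaData[0], plus the anchored corner's average position at x == 0) and evaluated at x == 1 to
                    // predict where the vertex sits for the full screen; the anchored corner itself is simply the average of
                    // the captured quads.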
                    var xValue = new List<float>() { 0 };
                    var predicts = new List<Vector>();
                    foreach (var i in LocateAreaData[0])
                        xValue.Add(i.size.x);
                    Vector baseVertex = Vector.Zero; // vertex at x == 0
                    {
                        foreach (var q in quadTemp)
                        {
                            baseVertex += q.Quad[areaSelected];
                        }
                        baseVertex /= quadTemp.Count;
                    }
                    double rs = 0.0;
                    for (int i = 0; i < 4; i++)
                    {
                        if (i == areaSelected)
                        {
                            predicts.Add(baseVertex);
                        }
                        else
                        {
                            var yValue = new List<Vector>() { baseVertex };
                            foreach (var q in quadTemp)
                            {
                                yValue.Add(q.Quad[i]);
                            }
                            var lr = LinerRegression1D.Fit(2, xValue.ToArray(), yValue.ToArray());
                            rs += lr.RSquared / 3;
                            predicts.Add(lr.Predict<Vector>(1));
                        }
                    }
                    SetScreenQuad(new QuadrilateralInCamera(predicts, new Vector(Size.x, Size.y)));
                    Debug.Log($"[ScreenIdentification] Fit succeeded, RSquared: {rs}, Quad: {Screen.QuadInCamera}");
                    //if (rs < 0.8) Screen.Quad = null;
                }
                OnLocateScreenEnd?.Invoke();
            }
        }
        public bool Update(Texture2D cam)
        {
            //if (!bStartLocateScreen) return false;
            if (delay != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                delay--;
                if (delay == 0)
                {
                    ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(cam.width, cam.height); // Record the current resolution
                    Debug.Log("[ScreenIdentification] Sampling the texture, recording sample resolution: [" + Size.x + ", " + Size.y + "]");
                }
                return true;
            }
            if (capture != 0)
            {
                //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
                CaptureStay(cam);
                capture--;
                if (capture == 0)
                {
                    CaptureEnd();
                    NextScreen();
                }
                return true;
            }
            return false;
            #region Old
            /*
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenBlackTexture == null)
                    ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                if (ScreenWhiteTexture == null)
                    ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                    ScreenLocate.SetScreen(UnityEngine.Color.black);
                return true;
            }
            if (delay != 0)
            {
                delay--;
                return true;
            }
            if (capture != 0)
            {
                capture--;
                var pixel = cam.GetPixels();
                Parallel.For(0, Size.x * Size.y, i =>
                {
                    var ip = pixel[i];
                    ScreenWhiteTexture[i] -= new Geometry.Vector<float>(ip.r, ip.g, ip.b);
                });
                if (capture == 0)
                {
                    ScreenLocate.SetScreen(null);
                    UnityEngine.Color[] newPixel = new UnityEngine.Color[Size.x * Size.y];
                    Parallel.For(0, Size.x * Size.y, i => {
                        var pi = ScreenWhiteTexture[i] /= capture;
                        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    });
                    // Read data
                    //{
                    //    var fileName = "3.bin";
                    //    ScreenLocateTexture = $"2023 04 16 厦门测试数据/{fileName}".FileReadByte<Vector<float>[]>();
                    //    Debug.Log($"Read {fileName}");
                    //    Parallel.For(0, Size.x * Size.y, i =>
                    //    {
                    //        var pi = ScreenLocateTexture[i];
                    //        newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
                    //    });
                    //}
                    var ScreenLocateTex = new Texture2D(Size.x, Size.y);
                    ScreenLocateTex.SetPixels(newPixel);
                    ScreenLocateTex.Apply();
                    //ScreenLocate.DebugTexture(2, ScreenLocateTex);
                    var ScreenLocateTexLighted = ScreenLocateTex.AutoLight(10);
                    //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
                    //var FileSavePath = Application.persistentDataPath + "/ScreenLocateTexture.bin";
                    bool Save = ScreenLocate.Main.SaveToggle.isOn;
                    string time;
                    if (Save)
                    {
                        time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
                        var FileSavePath = $"屏幕定位数据{time}.bin";
                        FileSavePath.FileWriteByte(ScreenWhiteTexture);
                        var bytes = ScreenLocateTexLighted.EncodeToPNG();
                        File.WriteAllBytes($"屏幕定位数据{time}.png", bytes);
                        Debug.Log("ScreenLocateTexture Saved To: " + FileSavePath);
                    }
                    var ScreenLocateTexR = ScreenLocateTexLighted.ToRGB(ColorChannel.Red);
                    var ScreenLocateTexG = ScreenLocateTexLighted.ToRGB(ColorChannel.Green);
                    var ScreenLocateTexB = ScreenLocateTexLighted.ToRGB(ColorChannel.Blue);
                    ScreenLocate.DebugTexture(2, ScreenLocateTexR);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
                    var watch = new System.Diagnostics.Stopwatch();
                    watch.Start();
                    var times = new List<double>() { 0.0 };
                    var ScreenLocateTexLightedMat = ScreenLocateTexLighted.Too0Mat();
                    //var ScreenLocateTexLightedMat = texture.Too0Mat();
                    //var (edge, edgeDir) = ScreenLocateTexLightedMat.IdentifyEdge();
                    var (edge, edgeDir) = ScreenLocateTexLightedMat.zimIdentifyEdgeGradientAny(15);
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradient().ToTex());
                    //ScreenLocate.DebugTexture(4, edge.ToTex());
                    var quadLines = ScreenLocateTexLightedMat.IdentifyQuadLSD(edge, edgeDir, out List<Line> lightLines, 30);
                    var drawLineMap = new MatrixF2D(edge.Size.x, edge.Size.y);
                    int lineCount = 0;
                    foreach (var l in quadLines)
                    {
                        if (l != null)
                        {
                            o0Extension.DrawLine(drawLineMap, l, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                            lineCount++;
                        }
                    }
                    if (lineCount == 4)
                    {
                        var a = quadLines[0].Intersect(quadLines[3], false).Value;
                        var b = quadLines[0].Intersect(quadLines[1], false).Value;
                        var c = quadLines[2].Intersect(quadLines[3], false).Value;
                        var d = quadLines[1].Intersect(quadLines[2], false).Value;
                        Quad = new Quadrilateral(a, b, c, d);
                        if (!Quad.IsInScreen(ScreenLocate.Main.WebCamera.Size))
                            Quad = null;
                    }
                    ScreenLocate.Main.ShowScreen(Quad);
                    //var lines = edge.IdentifyLineLSD(edgeDir, 100);
                    ////var lines = ScreenLocateTexLightedMat.IdentifyLineLSD();
                    //var drawLineMap = new MatrixF2D(edge.Size.x, edge.Size.y);
                    //var returnMaxLines = lines.Sub(0, 10);
                    //foreach (var (line, sum, gradient) in returnMaxLines)
                    //    o0Extension.DrawLine(drawLineMap, line, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
                    ScreenLocate.DebugTexture(3, drawLineMap.ToTex());
                    //{
                    //    var bytes = drawLineMap.ToTex().EncodeToPNG();
                    //    File.WriteAllBytes($"屏幕定位数据DrawLineMap.png", bytes);
                    //}
                    times.Add(watch.ElapsedMilliseconds);
                    Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
                    //ScreenLocate.DebugTexture(5, edge.IdentifyLine(edgeDir).ToTex());
                    //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientX().ToTex());
                    //ScreenLocate.DebugTexture(5, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientY().ToTex());
                    //var convolutionLighted2 = ScreenLocateTexLighted.Too0Mat().IdentifyEdgeVariance().ToTex();
                    // OpenCV processing
                    // zim
                    {
                        //var cvLines = edge.cvHoughLinesP();
                        //ScreenLocate.DebugTexture(5, cvLines);
                        //var myLines = Hough.Transform(edgeMat);
                        //var cvLines = edge.cvLine(myLines);
                        //ScreenLocate.DebugTexture(5, cvLines);
                    }
                    UnityEngine.Object.Destroy(ScreenLocateTex);
                    //ScreenLocate.DebugTexture(4, convolutionLighted2);
                }
                return true;
            }
            /*
            var avg = new Geometry4D.Vector<float>();
            var pixel = texture.GetPixels();
            foreach (var i in pixel.Index())
            {
                var iP = pixel[i];
                avg += new Geometry4D.Vector<float>(iP.r, iP.g, iP.b, iP.a);
            }
            avg /= pixel.Count();
            /*
            var (texLightedR, texLightedG, texLightedB) = ToRGB(newTex);
            ScreenLocate.DebugTexture(3, texLightedR);
            ScreenLocate.DebugTexture(4, texLightedG);
            ScreenLocate.DebugTexture(5, texLightedB);
            //Debug.Log(avg);
            return false;
            /**/
            #endregion
        }
        float GetBrightness()
        {
            UnityEngine.Color[] differPixel = new UnityEngine.Color[Size.x * Size.y];
            Parallel.For(0, Size.x * Size.y, i =>
            {
                var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
            });
            var sum = 0f;
            foreach (var i in differPixel)
            {
                sum += i.Brightness();
            }
            sum /= differPixel.Length;
            //Debug.Log(sum);
            return sum;
        }
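        // GetBrightness scores one quadrant probe: it averages Brightness() over the per-pixel difference between the
        // accumulated white and black captures, roughly mean_i(Brightness(white[i] - black[i])). The quadrant with the largest
        // score is the one the screen most likely occupies and becomes areaSelected in CaptureEnd.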
        // Convert to the texture image needed for screen locating
        Texture2D ToLocateTex(UnityEngine.Color[] pixels)
        {
            var ScreenLocateTex = new Texture2D(Size.x, Size.y);
            ScreenLocateTex.SetPixels(pixels);
            ScreenLocateTex.Apply();
            //ScreenLocate.DebugTexture(2, ScreenLocateTex);
            return ScreenLocateTex.AutoLight(10);
            //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
            //var ScreenLocateTexR = ToLocateTex.ToRGB(ColorChannel.Red);
            //var ScreenLocateTexG = ToLocateTex.ToRGB(ColorChannel.Green);
            //var ScreenLocateTexB = ToLocateTex.ToRGB(ColorChannel.Blue);
            //LocateLightedRedTex = ScreenLocateTexR;
            //ScreenLocate.DebugTexture(2, ScreenLocateTexR);
            //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
            //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
            //var ScreenLocateTexLightedMat = texture.Too0Mat();
        }
        /// <param name="lineWidth">Minimum length of line segments to identify</param>
        /// <param name="debugImages">If this is non-null and non-empty, run the debug path</param>
        void QuadrilateralFit(List<Texture2D> debugImages = null)
        {
            // Black/white screen difference; holds multiple batches of images for identification. This list must never be empty.
            List<UnityEngine.Color[]> PixelsMultipleBatches = new List<UnityEngine.Color[]>();
            var sw = new System.Diagnostics.Stopwatch();
            sw.Start();
            // Read data
            if (debugImages != null && debugImages.Count != 0)
            {
                foreach (var i in debugImages)
                {
                    Debug.Log($"<color=aqua>Debug {i.name}</color>");
                    PixelsMultipleBatches.Add(i.GetPixels());
                }
            }
            else // Compute the screen difference
            {
                var maxWhite = 0f;
                foreach (var i in ScreenWhiteTexture)
                {
                    var m = i.x > i.y ? (i.x > i.z ? i.x : i.z) : (i.y > i.z ? i.y : i.z);
                    if (maxWhite < m)
                        maxWhite = m;
                }
                var scale = 1.0f / maxWhite; // Boost contrast
                var differPixel = new UnityEngine.Color[Size.x * Size.y];
                //var whitePixel = new UnityEngine.Color[Size.x * Size.y];
                Parallel.For(0, Size.x, x =>
                {
                    for (int y = 0; y < Size.y; y++)
                    {
                        var i = y * Size.x + x;
                        var d = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
                        differPixel[i] = new UnityEngine.Color(d.x, d.y, d.z) * scale;
                        //whitePixel[i] = new UnityEngine.Color(ScreenWhiteTexture[i].x, ScreenWhiteTexture[i].y, ScreenWhiteTexture[i].z) * scale;
                    }
                });
                PixelsMultipleBatches.Add(differPixel); // colour-difference image
                //PixelsMultipleBatches.Add(whitePixel); // original image
            }
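            // The scale factor is 1 / maxWhite, where maxWhite is the largest channel value in the accumulated white capture;
            // multiplying the difference image by it stretches its contrast (e.g. maxWhite = 0.25 gives scale = 4), while
            // background that is equally bright in both captures cancels out in the subtraction.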
            int conSize = (int)Math.Ceiling(0.007f * Size.y) * 2 + 1;
            conSize = Math.Max(conSize, 7); // Clamp to a minimum of 7
            float minLength = conSize * 7.7f;
            minLength = locateIndex == -1 ? minLength : minLength * areaPercent; // minLength has to be scaled down proportionally
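            // Worked example of the heuristics above for a 1280x720 capture: conSize = ceil(0.007 * 720) * 2 + 1 = 13 (already
            // above the minimum of 7) and minLength = 13 * 7.7 ≈ 100 px; when only a partial region is lit (locateIndex != -1)
            // the threshold is scaled by areaPercent, e.g. ≈ 30 px for a 0.3-wide region.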
            string log = $"[Log][ScreenLocate Auto] Size: ({Size.x},{Size.y}), convolution kernel size: {conSize}, minimum line length: {minLength}";
            var allLines = new List<LineIdentified>();
            List<Texture2D> LocateTexTemp = new List<Texture2D>();
            List<Matrix> ScreenLocateMatList = new List<Matrix>();
            foreach (var batch in PixelsMultipleBatches.Index())
            {
                var locateTex = ToLocateTex(PixelsMultipleBatches[batch]);
                LocateTexTemp.Add(locateTex);
                var ScreenLocateMat = locateTex.Too0Mat(); // Matrix used to extract the lines
                var lineCount = ZIMIdentifyQuadLSD(ref allLines, batch, ScreenLocateMat.zimIdentifyEdgeGradientAny(conSize), minLength);
                log += $"\r\nImage {batch}: number of identified line segments: {lineCount}";
                ScreenLocateMatList.Add(ScreenLocateMat);
            }
            Texture2D ScreenLocateTexture = LocateTexTemp[0]; // for output
            // If manual data exists, refresh its Size
            QuadManual?.ReSize(new Vector(Size.x, Size.y), ScreenMap.ViewAspectRatioSetting);
            // Estimate the screen centre: if manual locating data already exists, averaging it is enough; otherwise compute it
            // from the colour difference. ScreenLocateMatList[0] is the screen's black/white difference by default.
            Vector AvgPoint = QuadManual != null ? QuadManual.Quad.Centroid : GetAvgPoint(ScreenLocateMatList[0]);
            // Filter down to the quadrilateral's four edges
            var (quadLinesSemiAuto, quadLinesAuto) = FilterLines(ScreenLocateMatList, allLines, AvgPoint,
                out LineIdentified[] manualLines, out List<LineIdentified> possibleLines,
                conSize, conSize, minLength);
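            // In the two blocks below the quad corners are recovered by intersecting adjacent edges in the fixed
            // bottom/right/top/left order: a = bottom∩left, b = bottom∩right, c = top∩left, d = top∩right.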
            #region Fully automatic result
            List<LineIdentified> LineIdentifiedAuto = new List<LineIdentified>(); // Edge order: bottom, right, top, left
            for (int i = 0; i < 4; i++)
            {
                if (quadLinesAuto[i] != null)
                    LineIdentifiedAuto.Add(quadLinesAuto[i]);
            }
            if (LineIdentifiedAuto.Count == 4) // Check whether the identified edges can form a screen; record the quad if so
            {
                var a = LineIdentifiedAuto[0].Line.Intersect(LineIdentifiedAuto[3].Line, false).Value;
                var b = LineIdentifiedAuto[0].Line.Intersect(LineIdentifiedAuto[1].Line, false).Value;
                var c = LineIdentifiedAuto[2].Line.Intersect(LineIdentifiedAuto[3].Line, false).Value;
                var d = LineIdentifiedAuto[1].Line.Intersect(LineIdentifiedAuto[2].Line, false).Value;
                QuadAuto = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
                if (!QuadAuto.IsQuadComplete())
                    QuadAuto = null;
            }
            #endregion
            #region Semi-automatic identification
            List<LineIdentified> LineIdentifiedSemiAuto = new List<LineIdentified>(); // Edge order: bottom, right, top, left
            LastQuadSemiAutoState = new bool[4] { true, true, true, true };
            for (int i = 0; i < 4; i++)
            {
                if (quadLinesSemiAuto[i] != null)
                    LineIdentifiedSemiAuto.Add(quadLinesSemiAuto[i]);
                else if (manualLines != null)
                {
                    LineIdentifiedSemiAuto.Add(manualLines[i]);
                    LastQuadSemiAutoState[i] = false;
                }
            }
            if (LineIdentifiedSemiAuto.Count == 4) // Check whether the identified edges can form a screen; record the quad if so
            {
                var a = LineIdentifiedSemiAuto[0].Line.Intersect(LineIdentifiedSemiAuto[3].Line, false).Value;
                var b = LineIdentifiedSemiAuto[0].Line.Intersect(LineIdentifiedSemiAuto[1].Line, false).Value;
                var c = LineIdentifiedSemiAuto[2].Line.Intersect(LineIdentifiedSemiAuto[3].Line, false).Value;
                var d = LineIdentifiedSemiAuto[1].Line.Intersect(LineIdentifiedSemiAuto[2].Line, false).Value;
                QuadSemiAuto = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
                if (!QuadSemiAuto.IsQuadComplete())
                    QuadSemiAuto = null;
            }
            #endregion
            // Prefer the semi-automatic result (it can also be set manually from outside)
            if (QuadSemiAuto == null && QuadAuto == null && Screen.QuadInCamera != null) // Fall back to the previous screen if possible
            {
                Debug.Log($"<color=yellow>[ScreenIdentification] This identification failed; falling back to the previous result: {Screen.QuadInCamera}</color>");
                quadTemp.Add(Screen.QuadInCamera);
            }
            else if (QuadSemiAuto != null)
            {
                Debug.Log($"<color=aqua>[ScreenIdentification] Quadrilateral identified (semi-automatic): {QuadSemiAuto}</color>");
                quadTemp.Add(QuadSemiAuto);
            }
            else if (QuadAuto != null)
            {
                Debug.Log($"<color=aqua>[ScreenIdentification] Quadrilateral identified (fully automatic): {QuadAuto}</color>");
                quadTemp.Add(QuadAuto);
            }
            #region Draw output textures
            // Draw the semi-automatic result
            var ScreenQuadMap = new Matrix(Size, Tiling: true); // The identified screen quadrilateral (semi-automatic and automatic on one image)
            foreach (var i in LineIdentifiedSemiAuto.Index())
            {
                if (LastQuadSemiAutoState[i])
                    o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 2, new Geometry2D.Float.Vector(0, 10));
                else
                    o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 1, new Geometry2D.Float.Vector(0, 6), true);
            }
            // Draw the fully automatic result
            foreach (var i in LineIdentifiedAuto.Index())
                o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedAuto[i].DrawLine, (x, y) => 4, new Geometry2D.Float.Vector(0, 4), true);
            Texture2D ScreenQuad = ScreenQuadMap.ToTexRGBA(FloatValueToColor);
            Texture2D ScreenQuadWithScreen = ScreenQuad.Overlay(ScreenLocateTexture); // Overlay the screen colour-difference image
            // Draw allLines
            var allLinesMap = new Matrix(Size, Tiling: true);
            foreach (var l in allLines)
            {
                if (l.DrawLine != null)
                    o0Extension.DrawLine(allLinesMap, l.DrawLine, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true);
            }
            var allLinesTex = allLinesMap.ToTexRGBA(FloatValueToColor);
            ScreenLocate.DebugTexture(1, allLinesTex);
            // Also output a result image that includes the distracting candidate segments
            var ChoosableLineMap = new Matrix(Size, Tiling: true);
            foreach (var l in possibleLines)
            {
                if (l != null && !quadLinesSemiAuto.Contains(l) && !manualLines.Contains(l))
                    o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true); // Other candidate segments
            }
            foreach (var l in LineIdentifiedSemiAuto)
            {
                if (l != null)
                    o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 2, new Geometry2D.Float.Vector(0, 5)); // The identified result
            }
            if (manualLines != null)
            {
                foreach (var l in manualLines)
                    o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true); // The old screen edges (e.g. from the last manual identification)
            }
            Texture2D ChoosableLineTex = ChoosableLineMap.ToTexRGBA(FloatValueToColor);
            #endregion
            log += $"\r\nScreen quad, manual: {QuadManual != null}\r\nScreen quad, semi-automatic: {QuadSemiAuto != null}\r\nScreen quad, fully automatic: {QuadAuto != null}";
            Debug.Log(log);
            // Whether to save the images locally
            if ((ScreenLocate.Main.SaveToggle?.isOn ?? false) && ScreenLocate.Main.DebugOnZIMDemo)
            {
                var FileDirectory = $"Debug_屏幕定位/";
                SaveImages(FileDirectory, log, ScreenLocateTexture, allLinesTex, ChoosableLineTex, ScreenQuad);
            }
            //times.Add(watch.ElapsedMilliseconds);
            //Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
            // OpenCV processing, zim
            {
                //var cvLines = edge.cvHoughLinesP();
                //ScreenLocate.DebugTexture(5, cvLines);
                //var myLines = Hough.Transform(edgeMat);
                //var cvLines = edge.cvLine(myLines);
                //ScreenLocate.DebugTexture(5, cvLines);
            }
            {
                ScreenLocate.DebugTexture(2, ScreenLocateTexture);
                ScreenLocate.DebugTexture(3, ScreenQuad);
                ScreenLocate.DebugTexture(4, ScreenQuadWithScreen);
                ScreenLocate.DebugTexture(5, ChoosableLineTex);
            }
            foreach (var i in LocateTexTemp)
            {
                if (i != ScreenLocateTexture) // ScreenLocateTexture is released by ScreenLocate.DebugTexture
                    GameObject.Destroy(i);
            }
        }
        Vector GetAvgPoint(Matrix screenLocateMat)
        {
            // Weighted average
            Vector[] avgPointsColumn = new Vector[screenLocateMat.Size.x];
            float[] valueSumsColumn = new float[screenLocateMat.Size.x];
            Parallel.For(0, screenLocateMat.Size.x, i =>
            {
                for (int j = 0; j < screenLocateMat.Size.y; j++)
                {
                    var value = screenLocateMat[i, j];
                    valueSumsColumn[i] += value;
                    avgPointsColumn[i] += new Vector(i, j) * value;
                }
            });
            Vector avgPoint = Vector.Zero;
            var valueSum = 0f;
            for (int i = 0; i < screenLocateMat.Size.x; i++)
            {
                avgPoint += avgPointsColumn[i];
                valueSum += valueSumsColumn[i];
            }
            avgPoint /= valueSum;
            return avgPoint;
        }
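        // GetAvgPoint computes an intensity-weighted centroid of the difference image, avgPoint = Σ p(i,j) · m(i,j) / Σ m(i,j),
        // accumulated per column in parallel and then combined. Bright (screen) pixels pull the estimate toward the middle of
        // the lit area, and that point later serves as the reference the edge gradients must face in FilterLines.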
        // Returns the number of line segments found; 0 means the search failed
        int ZIMIdentifyQuadLSD(ref List<LineIdentified> allLines, int batch, (Matrix edgeMat, Matrix edgeDirMat) edgeGradient,
            float minLength = 100)
        {
            var l = edgeGradient.edgeMat.IdentifyLineLSD(edgeGradient.edgeDirMat, minLength, 50, LineCaptureSize: new Vector(10, 6));
            if (l == null || l.Count == 0)
                return 0;
            allLines.AddRange(l.Select((i) => new LineIdentified(batch, i)));
            return l.Count;
        }
        // Returns the quadrilateral's four edges (semi-automatic, fully automatic). Each list always has length 4 (an entry is
        // null if nothing was identified) and the edge order is: bottom, right, top, left
        (List<LineIdentified>, List<LineIdentified>) FilterLines(List<Matrix> screenLocateMatList, List<LineIdentified> allLines, Vector avgPoint,
            out LineIdentified[] manualLines, out List<LineIdentified> possibleLines, float conSize, float gradientLength, float minLength = 100)
        {
            //Debug.Log("[IdentifyLineLSD] lines.Count: " + lines.Count);
            var offset = new Vector((conSize - 1) / 2, (conSize - 1) / 2);
            // The matrix produced by the LSD step is smaller (because of the convolution), so the lines must be shifted here
            for (int i = 0; i < allLines.Count; i++)
                allLines[i].Offset(offset);
            // Discard segments outside the elliptical frame (a segment more than half outside is dropped)
            var innerLines = new List<LineIdentified>();
            for (int i = 0; i < allLines.Count; i++)
            {
                List<Vector> InArea = new List<Vector>();
                var dir = (allLines[i].Line.B - allLines[i].Line.A) / 4;
                var points = new Vector[5] { allLines[i].Line.A, allLines[i].Line.A + dir, allLines[i].Line.A + dir * 2f, allLines[i].Line.A + dir * 3f, allLines[i].Line.B }; // point A, intermediate points, point B
                for (int pI = 0; pI < points.Length; pI++)
                {
                    if (!ScreenLocate.Main.ScreenPixelCheaker.OutArea2D(points[pI], Size))
                        InArea.Add(points[pI]);
                }
                if (InArea.Count < 2) // fewer than 2 points inside
                    continue;
                else if (InArea.Count < points.Length) // not entirely inside
                    allLines[i].DrawLine = new Line(InArea.First(), InArea.Last()); // Keep only the inner part as DrawLine, used by the drawing step later
                else // the segment lies entirely inside the ellipse
                    allLines[i].DrawLine = allLines[i].Line;
                innerLines.Add(allLines[i]);
            }
            // Angle threshold used to decide whether a segment's gradient direction points toward the screen centre (avgPoint)
            var avaAngleHalf = 75f;
            // Compute a combined gradient along the line (the gradient times a length factor times a distance factor);
            // distanceRatio is the actual distance divided by the maximum distance
            float estimateGradient(LineIdentified line, float distanceRatio)
            {
                var dir = (line.Line.B - line.Line.A).Normalized;
                var vertical = new Vector(-dir.y, dir.x) * (gradientLength / 2);
                var step = 2;
                var ll = line.Line.Length;
                var lg = new List<float>();
                for (int i = 0; i <= ll; i += step)
                {
                    var point = line.Line.A + dir * i;
                    var ga = point + vertical;
                    var gb = point - vertical;
                    lg.Add(screenLocateMatList[line.Batch][(int)ga.x, (int)ga.y] - screenLocateMatList[line.Batch][(int)gb.x, (int)gb.y]);
                }
                float e = (float)Math.Sqrt(Math.Ceiling(line.Line.Length / minLength)); // Length factor: during filtering, longer segments with larger gradients are preferred
                float d = (3 - distanceRatio) / 2; // Distance factor: the closer the distance, the larger the factor
                return e * d * Math.Abs(lg.Mean());
            }
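            // estimateGradient samples the gradient across the line every 2 px (value at point + vertical minus value at
            // point - vertical) and scores the segment as
            //   score = sqrt(ceil(length / minLength)) * ((3 - distanceRatio) / 2) * |mean cross-line difference|
            // so longer segments, stronger edges and (on the semi-automatic path) segments closer to the previous manual edge
            // all rank higher. The fully automatic path passes distanceRatio = 1, which makes the distance factor neutral.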
            // Use the angle of the segment's gradient to decide whether it is a screen edge; out index says which edge it is
            // (order: bottom, right, top, left)
            bool isScreenLine(LineIdentified line, out int index)
            {
                var a = (avgPoint - (line.Line.A + line.Line.B) / 2).DegreeToXAxis();
                //Debug.Log(a + ", " + gradient + ", " + sum);
                index = -1;
                if (Math.Abs(a - line.GradientDegree) < avaAngleHalf || Math.Abs(a - 360 - line.GradientDegree) < avaAngleHalf || Math.Abs(a + 360 - line.GradientDegree) < avaAngleHalf)
                {
                    if (line.GradientDegree > 45 && line.GradientDegree < 135) // bottom
                        index = 0;
                    else if (line.GradientDegree > 135 && line.GradientDegree < 225) // right
                        index = 1;
                    else if (line.GradientDegree > 225 && line.GradientDegree < 315) // top
                        index = 2;
                    else
                        index = 3;
                    return true;
                }
                return false;
            }
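            // isScreenLine accepts a segment when its gradient direction lies within ±avaAngleHalf (75°) of the direction from
            // the segment's midpoint toward avgPoint (the two extra Abs() checks handle the 0/360° wrap-around), and the
            // gradient angle then picks the edge slot: 45–135° -> bottom (0), 135–225° -> right (1), 225–315° -> top (2),
            // otherwise left (3).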
            // bottom, right, top, left; semi-automatic and automatic
            var quadLinesSemiAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
            var quadLinesAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
            possibleLines = new List<LineIdentified>();
            // If manual locating data already exists, filter the lines against it (semi-automatic)
            manualLines = null;
            if (QuadManual != null)
            {
                Debug.Log($"[IdentifyLineLSD] Filtering against existing locating data, QuadManual: {QuadManual}");
                manualLines = QuadManual.GetLines().Select((i) => new LineIdentified(0, i, 0, 0, true)).ToArray();
                var calibration = ScreenLocate.Main.ReDoLocateCalibrationRatio * Size.y;
                var avgPointCross = manualLines.Select((i) => i.Line.LineCrossWithPoint(avgPoint)).ToArray(); // cross value relative to the average point
                var avgPointPedal = manualLines.Select((i) => o0Extension.PointPedal(i.Line, avgPoint, out _)).ToArray(); // Feet of the perpendicular for the current location; bottom, right, top, left
                foreach (var line in innerLines)
                {
                    // Filter criteria: 1) the gradient direction matches, 2) the foot of the perpendicular is close enough,
                    // 3) both endpoints A and B lie outside the old edge, 4) the foot of the perpendicular from the new
                    // segment's midpoint onto the old edge lies within the old segment
                    if (isScreenLine(line, out int index))
                    {
                        var distanceToOld = (o0Extension.PointPedal(line.Line, avgPoint, out _) - avgPointPedal[index]).Length;
                        if (distanceToOld < calibration &&
                            manualLines[index].Line.LineCrossWithPoint(line.Line.A) * avgPointCross[index] <= 0 &&
                            manualLines[index].Line.LineCrossWithPoint(line.Line.B) * avgPointCross[index] <= 0)
                        {
                            var middleToOldLine = o0Extension.PointPedal(manualLines[index].Line, (line.Line.A + line.Line.B) / 2, out bool inLineSegment);
                            if (inLineSegment)
                            {
                                quadLinesSemiAuto[index].Add((estimateGradient(line, distanceToOld / calibration), line));
                                possibleLines.Add(line);
                            }
                        }
                    }
                }
            }
            // Fully automatic
            foreach (var line in allLines)
            {
                if (isScreenLine(line, out int index))
                {
                    if (line.Batch < 1) // The fully automatic path only uses the first image, which is the colour-difference image by default
                    {
                        quadLinesAuto[index].Add((estimateGradient(line, 1), line));
                    }
                }
            }
            var resultSemiAuto = new LineIdentified[4];
            var resultAuto = new LineIdentified[4];
            for (int i = 0; i < 4; i++)
            {
                if (quadLinesSemiAuto[i].Count > 0)
                    resultSemiAuto[i] = quadLinesSemiAuto[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
                if (quadLinesAuto[i].Count > 0)
                    resultAuto[i] = quadLinesAuto[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
            }
            return (resultSemiAuto.ToList(), resultAuto.ToList());
        }
        void SaveImages(string FileDirectory, string log,
            Texture2D ScreenLocateTex, Texture2D allLinesTex, Texture2D ChoosableLineTex, Texture2D ScreenQuadTex)
        {
            if (!Directory.Exists(FileDirectory))
                Directory.CreateDirectory(FileDirectory);
            var time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
            var pngData = ScreenLocate.Main.OutputTextures[7]?.EncodeToPNG();
            if (pngData != null)
                File.WriteAllBytes($"{FileDirectory}{time}A屏幕原图.png", pngData);
            var pngData1 = ScreenLocateTex?.EncodeToPNG();
            if (pngData1 != null)
                File.WriteAllBytes($"{FileDirectory}{time}B黑白色差.png", pngData1);
            var pngData2 = allLinesTex?.EncodeToPNG();
            if (pngData2 != null)
                File.WriteAllBytes($"{FileDirectory}{time}C全部识别线段_半自动.png", pngData2);
            var pngData3 = ChoosableLineTex?.EncodeToPNG();
            if (pngData3 != null)
                File.WriteAllBytes($"{FileDirectory}{time}D备选线段_半自动.png", pngData3);
            var pngData4 = ScreenQuadTex?.EncodeToPNG();
            if (pngData4 != null)
                File.WriteAllBytes($"{FileDirectory}{time}E识别结果.png", pngData4);
            Debug.Log($"<color=aqua>({time}) Screen identification images saved to: application root/{FileDirectory}</color>");
            log +=
                $"\r\nScreen original image saved: {pngData != null}, " +
                $"\r\nBlack/white difference saved: {pngData1 != null}, " +
                $"\r\nAll identified segments (semi-automatic) saved: {pngData2 != null}, " +
                $"\r\nCandidate segments (semi-automatic) saved: {pngData3 != null}, " +
                $"\r\nIdentification result saved: {pngData4 != null}";
            File.WriteAllText($"{FileDirectory}{time}屏幕自动定位_日志.log", log);
        }
    }
}