ScreenIdentification.cs

  1. #define ENABLE_LOG
  2. using o0.Geometry2D.Float;
  3. using o0.Num;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.IO;
  7. using System.Linq;
  8. using System.Threading.Tasks;
  9. using UnityEngine;
  10. using UnityEngine.UIElements;
  11. using UnityStandardAssets.Utility;
  12. using ZIM;
  13. using ZIM.Unity;
  14. namespace o0.Project
  15. {
  16. public partial class ScreenIdentification
  17. {
  18. private const string TAG = "ScreenIdentification#";
  19. // LocateAreaData lists the screen regions used for each color-difference pass; there can be several passes. Setting LocateSingleStep restricts identification to a single color-difference pass.
  20. static Rect[][] LocateAreaData = new Rect[][] {
  21. new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f) },
  22. new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f) },
  23. new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f) },
  24. new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f) }
  25. };
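  // Each row anchors one screen corner and lists the white probe rectangle at growing sizes (0.3, 0.4, 0.5 of the screen).
  // CaptureEnd() later regresses the fitted quad vertices against these sizes and extrapolates to size 1.0 to estimate the full-screen corners.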
  26. //static Rect[][] LocateAreaData = new Rect[][] {
  27. // new Rect[] { new Rect(0f, 0f, 0.3f, 0.3f), new Rect(0f, 0f, 0.4f, 0.4f), new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0f, 0f, 0.6f, 0.6f) },
  28. // new Rect[] { new Rect(0.7f, 0f, 0.3f, 0.3f), new Rect(0.6f, 0f, 0.4f, 0.4f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0.4f, 0f, 0.6f, 0.6f) },
  29. // new Rect[] { new Rect(0f, 0.7f, 0.3f, 0.3f), new Rect(0f, 0.6f, 0.4f, 0.4f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0f, 0.4f, 0.6f, 0.6f) },
  30. // new Rect[] { new Rect(0.7f, 0.7f, 0.3f, 0.3f), new Rect(0.6f, 0.6f, 0.4f, 0.4f), new Rect(0.5f, 0.5f, 0.5f, 0.5f), new Rect(0.4f, 0.4f, 0.6f, 0.6f) }
  31. //};
  32. //static bool LocateSingleStep = false;
  33. static bool LocateSingleStep = true;
  34. public Geometry2D.Vector<int> Size => ScreenLocate.Main.CameraSize;
  35. public QuadrilateralInCamera QuadManual;
  36. public QuadrilateralInCamera QuadAuto; // fully automatic result; may be offered to the user (takes effect when assigned to Screen.QuadInCamera)
  37. public QuadrilateralInCamera QuadSemiAuto; // semi-automatic result; may be offered to the user (takes effect when assigned to Screen.QuadInCamera)
  38. public ScreenMap Screen; // the identified screen, used to perform the perspective transform
  39. int capture = 0;
  40. int delay = 0;
  41. int maxCapture;
  42. int maxDelay;
  43. Geometry.Vector<float>[] ScreenBlackTexture;
  44. Geometry.Vector<float>[] ScreenWhiteTexture;
  45. int locateIndex = -1;
  46. readonly List<Rect> locateArea = new List<Rect> {
  47. new Rect(0f, 0f, 0.5f, 0.5f), new Rect(0.5f, 0f, 0.5f, 0.5f), new Rect(0f, 0.5f, 0.5f, 0.5f), new Rect(0.5f, 0.5f, 0.5f, 0.5f)
  48. }; // regions of the screen lit white, one per quadrant
  49. float areaPercent => locateArea[locateIndex].size.x; // fraction of the screen covered by the current white region
  50. int areaSelected = -1; // which region was selected; order matches the Quadrilateral vertices
  51. readonly List<float> sumTemp = new List<float>();
  52. readonly List<QuadrilateralInCamera> quadTemp = new List<QuadrilateralInCamera>();
  53. //public ScreenIdentification(WebCamTexture texture)
  54. //{
  55. // Size = new Geometry2D.Vector<int>(texture.width, texture.height);
  56. // Screen = new ScreenMap();
  57. //}
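  // Palette for the debug overlays drawn in QuadrilateralFit(): 1 = raw/candidate segments (yellow),
  // 3 = manual or previous-quad edges (green), 4 = fully automatic edges (white), 5 = semi-automatic edges (red);
  // 2 (cyan) is currently unused in this file.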
  58. public static UnityEngine.Color FloatValueToColor(float i)
  59. {
  60. return i switch
  61. {
  62. 1 => UnityEngine.Color.yellow,
  63. 2 => new UnityEngine.Color(0,1,1,1),
  64. 3 => UnityEngine.Color.green,
  65. 4 => UnityEngine.Color.white,
  66. 5 => UnityEngine.Color.red,
  67. _ => UnityEngine.Color.black,
  68. };
  69. }
  70. public ScreenIdentification()
  71. {
  72. Screen = new ScreenMap();
  73. //OnLocateScreenEnter += () => Application.targetFrameRate = 30; // fix the frame rate during identification so the camera captures the intended frames
  74. //OnLocateScreenEnd += () => Application.targetFrameRate = 60;
  75. }
  76. public void SetScreenQuad(QuadrilateralInCamera quad) => Screen.QuadInCamera = quad;
  77. // Result of the last semi-automatic identification; false means that edge was not detected. Edge order: bottom, right, top, left
  78. public bool[] LastQuadSemiAutoState;
  79. public event Action OnLocateScreenEnter;
  80. public event Action OnLocateScreenEnd;
  81. public bool bStartLocateScreen { get; set; } = false; // whether capture is in progress
  82. public bool SelectScreenAfterLocate(ScreenLocate.ScreenIdentificationTag tag)
  83. {
  84. QuadrilateralInCamera target = GetScreenAfterLocate(tag);
  85. if (target == null)
  86. return false;
  87. Debug.Log($"<color=aqua>[ScreenIdentification] 选择已识别到的屏幕({Enum.GetName(typeof(ScreenLocate.ScreenIdentificationTag), tag)}), {target}</color>");
  88. SetScreenQuad(target);
  89. return true;
  90. }
  91. public QuadrilateralInCamera GetScreenAfterLocate(ScreenLocate.ScreenIdentificationTag tag)
  92. {
  93. return tag switch
  94. {
  95. ScreenLocate.ScreenIdentificationTag.Manual => QuadManual,
  96. ScreenLocate.ScreenIdentificationTag.SemiAuto => QuadSemiAuto,
  97. ScreenLocate.ScreenIdentificationTag.Auto => QuadAuto,
  98. _ => null
  99. };
  100. }
  101. // Entry point that starts automatic identification
  102. public void LocateScreen(int Capture = 30, int Delay = 30) // values are in frames
  103. {
  104. if (ScreenLocate.Main.DebugScreenImages.Count != 0 && ScreenLocate.Main.DebugOnZIMDemo) // this branch is only for testing with still images
  105. {
  106. ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(ScreenLocate.Main.DebugScreenImages[0].width, ScreenLocate.Main.DebugScreenImages[0].height);
  107. DebugImage(ScreenLocate.Main.DebugScreenImages);
  108. Screen.QuadInCamera = quadTemp[0];
  109. ScreenLocate.SetScreen(null);
  110. ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
  111. delay = 0;
  112. capture = 0;
  113. ScreenWhiteTexture = null;
  114. ScreenBlackTexture = null;
  115. locateIndex = -1;
  116. areaSelected = -1;
  117. quadTemp.Clear();
  118. sumTemp.Clear();
  119. ScreenLocate.Main.DebugScreenImages.Clear();
  120. return;
  121. }
  122. delay = Math.Max(Delay, 5);
  123. capture = Math.Max(Capture, 5);
  124. maxDelay = Delay;
  125. maxCapture = Capture;
  126. ScreenLocate.SetScreen(new Rect(0f, 0f, 1f, 1f), UnityEngine.Color.black);
  127. //ScreenLocate.SetScreen(new Rect(0f, 0f, 0.6f, 0.6f), UnityEngine.Color.white);
  128. //bStartLocateScreen = false;
  129. ScreenWhiteTexture = null;
  130. ScreenBlackTexture = null;
  131. OnLocateScreenEnter?.Invoke();
  132. }
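  // From here Update() drives the process each frame: it first averages the black baseline, then NextScreen() lights the probe regions one by one.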
  133. /// <summary>
  134. /// Whether capture has been started,
  135. /// i.e. the two counters, capture and delay, have been initialized.
  136. /// </summary>
  137. /// <returns>true while both the capture and delay counters are non-zero.</returns>
  138. public bool isInitLocateScreen()
  139. {
  140. return capture != 0 && delay != 0;
  141. }
  142. void DebugImage(List<Texture2D> images)
  143. {
  144. QuadrilateralFit(images);
  145. //var watch = new System.Diagnostics.Stopwatch();
  146. //watch.Start();
  147. //var times = new List<double>() { 0.0 };
  148. #if (!NDEBUG && DEBUG && ENABLE_LOG)
  149. Console.WriteLine($"{TAG} quadTemp.Count:{quadTemp.Count}");
  150. #endif
  151. if (quadTemp.Count > 0)
  152. {
  153. ScreenLocate.Main.ShowScreen(ScreenLocate.Main.outputRawImages[4].transform.GetChild(0) as RectTransform, quadTemp[0]);
  154. // perspective transform
  155. // var srcWidth = LocateLightedRedTex.width;
  156. // var transformWidth = (int)((quad.B.x - quad.A.x + quad.D.x - quad.C.x) / 2);
  157. // var transformHeight = (int)((quad.C.y - quad.A.y + quad.D.y - quad.B.y) / 2);
  158. // var transformTex = new Texture2D(transformWidth, transformHeight);
  159. // var pt = new ZIMPerspectiveTransform(new OrdinalQuadrilateral(new Vector(0, 0), new Vector(transformWidth, 0), new Vector(0, transformHeight), new Vector(transformWidth, transformHeight)), quad);
  160. // var dstPixel = new UnityEngine.Color[transformWidth * transformHeight];
  161. // var srcPixel = LocateLightedRedTex.GetPixels();
  162. // Parallel.For(0, transformWidth, (x) =>
  163. // {
  164. // for (int y = 0; y < transformHeight; y++)
  165. // {
  166. // var index = y * transformWidth + x;
  167. // var sampleCoord = pt.TransformRound(x, y);
  168. // dstPixel[index] = srcPixel[sampleCoord.y * srcWidth + sampleCoord.x];
  169. // }
  170. // });
  171. // transformTex.SetPixels(dstPixel);
  172. // transformTex.Apply();
  173. // //ScreenLocate.DebugTexture(1, transformTex);
  174. //#if (!NDEBUG && DEBUG && ENABLE_LOG)
  175. // Console.WriteLine($"{TAG} ScreenLocate.DebugTexture 1:{transformTex.GetNativeTexturePtr()}");
  176. //#endif
  177. }
  178. //times.Add(watch.ElapsedMilliseconds);
  179. //Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
  180. }
  181. public void NextScreen()
  182. {
  183. // Detect only a single color-difference change
  184. if (LocateSingleStep && areaSelected == -1)
  185. {
  186. LocateAreaData = new Rect[][] { new Rect[] { new Rect(0, 0, 1f, 1f) } };
  187. locateIndex = 3;
  188. areaSelected = 0;
  189. locateArea.AddRange(LocateAreaData[0]);
  190. }
  191. // index starts at -1
  192. locateIndex++;
  193. if (locateIndex < locateArea.Count) // light up the screen regions one by one
  194. {
  195. ScreenLocate.SetScreen(locateArea[locateIndex], UnityEngine.Color.white);
  196. delay = maxDelay;
  197. capture = maxCapture;
  198. }
  199. else // stop driving the screen black/white
  200. {
  201. ScreenLocate.SetScreen(null);
  202. ScreenLocate.Main.ShowScreen(ScreenLocate.Main.ScreenQuad, Screen.QuadInCamera);
  203. Reset();
  204. }
  205. }
  206. // Clear the cached screen-identification data (manual, automatic, etc.)
  207. public void ClearQuadCache()
  208. {
  209. SetScreenQuad(null);
  210. QuadManual = null;
  211. QuadSemiAuto = null;
  212. QuadAuto = null;
  213. }
  214. public void Reset()
  215. {
  216. // bStartLocateScreen = false;
  217. delay = 0;
  218. capture = 0;
  219. ScreenWhiteTexture = null;
  220. ScreenBlackTexture = null;
  221. locateIndex = -1;
  222. areaSelected = -1;
  223. if (locateArea.Count > 4)
  224. locateArea.RemoveRange(4, LocateAreaData[0].Length);
  225. quadTemp.Clear();
  226. sumTemp.Clear();
  227. }
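  // CaptureBlack/CaptureWhite accumulate camera frames while the screen shows black or a white region.
  // Each frame is divided by maxCapture, so after maxCapture frames the buffers hold the per-pixel average frame.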
  228. public void CaptureBlack(Texture2D cam)
  229. {
  230. if (ScreenBlackTexture == null)
  231. ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
  232. var pixel = cam.GetPixels();
  233. Parallel.For(0, Size.x * Size.y, i =>
  234. {
  235. var ip = pixel[i];
  236. ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
  237. });
  238. }
  239. public void CaptureWhite(Texture2D cam)
  240. {
  241. if (ScreenWhiteTexture == null)
  242. ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
  243. var pixel = cam.GetPixels();
  244. Parallel.For(0, Size.x * Size.y, i =>
  245. {
  246. var ip = pixel[i];
  247. ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r / maxCapture, ip.g / maxCapture, ip.b / maxCapture);
  248. });
  249. }
  250. public void CaptureStay(Texture2D cam)
  251. {
  252. if (locateIndex == -1) // screen is black
  253. {
  254. CaptureBlack(cam);
  255. }
  256. else // part of the screen is white
  257. {
  258. CaptureWhite(cam);
  259. }
  260. }
  261. public void CaptureEnd()
  262. {
  263. //Debug.Log("locateIndex: " + locateIndex + ", quad: " + quadTemp.Count);
  264. if (locateIndex == -1)
  265. return;
  266. if (locateIndex < 4)
  267. {
  268. sumTemp.Add(GetBrightness());
  269. ScreenWhiteTexture = null;
  270. // pick the region with the largest brightness difference
  271. if (locateIndex == 3)
  272. {
  273. areaSelected = sumTemp.MaxIndex();
  274. locateArea.AddRange(LocateAreaData[areaSelected]);
  275. }
  276. }
  277. else if (locateIndex >= 4 && locateIndex < locateArea.Count - 1)
  278. {
  279. QuadrilateralFit();
  280. ScreenWhiteTexture = null;
  281. }
  282. else
  283. {
  284. QuadrilateralFit();
  285. if (quadTemp.Count != LocateAreaData[0].Length)
  286. {
  287. Debug.Log($"<color=yellow>[ScreenIdentification] 拟合四边形失败, quadTemp.Count: {quadTemp.Count}</color>");
  288. }
  289. else if (quadTemp.Count == 1)
  290. {
  291. SetScreenQuad(quadTemp[0]);
  292. Debug.Log($"[ScreenIdentification] 拟合成功,识别数据: {Screen.QuadInCamera}");
  293. }
  294. else
  295. {
  296. // Debug.Log($"拟合四边形 2 , quadTemp.Count: {quadTemp.Count}");
  297. // linear fit
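  // xValue holds the white-region sizes, with 0 standing in for the anchor corner (baseVertex, averaged over the fits, is used as the sample at size 0);
  // each of the other three corners is regressed against size and evaluated at size 1 (the full screen), and rs averages the reported R² over those three fits.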
  298. var xValue = new List<float>() { 0 };
  299. var predicts = new List<Vector>();
  300. foreach (var i in LocateAreaData[0])
  301. xValue.Add(i.size.x);
  302. Vector baseVertex = Vector.Zero; // the vertex at x == 0
  303. {
  304. foreach (var q in quadTemp)
  305. {
  306. baseVertex += q.Quad[areaSelected];
  307. }
  308. baseVertex /= quadTemp.Count;
  309. }
  310. double rs = 0.0;
  311. for (int i = 0; i < 4; i++)
  312. {
  313. if (i == areaSelected)
  314. {
  315. predicts.Add(baseVertex);
  316. }
  317. else
  318. {
  319. var yValue = new List<Vector>() { baseVertex };
  320. foreach (var q in quadTemp)
  321. {
  322. yValue.Add(q.Quad[i]);
  323. }
  324. var lr = LinerRegression1D.Fit(2, xValue.ToArray(), yValue.ToArray());
  325. rs += lr.RSquared / 3;
  326. predicts.Add(lr.Predict<Vector>(1));
  327. }
  328. }
  329. SetScreenQuad(new QuadrilateralInCamera(predicts, new Vector(Size.x, Size.y)));
  330. Debug.Log($"[ScreenIdentification] 拟合成功,RSquared: {rs}, Quad: {Screen.QuadInCamera}");
  331. //if (rs < 0.8) Screen.Quad = null;
  332. }
  333. OnLocateScreenEnd?.Invoke();
  334. }
  335. }
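  // Per-frame driver while identification runs: the delay counter lets the newly lit screen settle (the camera resolution is recorded on the last delay frame),
  // then the capture counter accumulates frames via CaptureStay(); when it reaches zero, CaptureEnd() processes the batch and NextScreen() advances.
  // Returns true as long as identification is consuming frames.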
  336. public bool Update(Texture2D cam)
  337. {
  338. //if (!bStartLocateScreen) return false;
  339. if (delay != 0)
  340. {
  341. //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
  342. delay--;
  343. if (delay == 0)
  344. {
  345. ScreenLocate.Main.CameraSize = new Geometry2D.Vector<int>(cam.width, cam.height); // record the current resolution
  346. Debug.Log("[ScreenIdentification] 采样纹理,记录采样分辨率: [" + Size.x + ", " + Size.y + "]");
  347. }
  348. return true;
  349. }
  350. if (capture != 0)
  351. {
  352. //ScreenLocate.Main.CreateUVCTexture2DFocusSizeIfNeeded(1280, 720);
  353. CaptureStay(cam);
  354. capture--;
  355. if (capture == 0)
  356. {
  357. CaptureEnd();
  358. NextScreen();
  359. }
  360. return true;
  361. }
  362. return false;
  363. #region Old
  364. /*
  365. if (delay != 0)
  366. {
  367. delay--;
  368. return true;
  369. }
  370. if (capture != 0)
  371. {
  372. capture--;
  373. if (ScreenBlackTexture == null)
  374. ScreenBlackTexture = new Geometry.Vector<float>[Size.x * Size.y];
  375. var pixel = cam.GetPixels();
  376. Parallel.For(0, Size.x * Size.y, i =>
  377. {
  378. var ip = pixel[i];
  379. ScreenBlackTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
  380. });
  381. if (capture == 0)
  382. ScreenLocate.SetScreen(UnityEngine.Color.black);
  383. return true;
  384. }
  385. if (delay != 0)
  386. {
  387. delay--;
  388. return true;
  389. }
  390. if (capture != 0)
  391. {
  392. capture--;
  393. if (ScreenWhiteTexture == null)
  394. ScreenWhiteTexture = new Geometry.Vector<float>[Size.x * Size.y];
  395. var pixel = cam.GetPixels();
  396. Parallel.For(0, Size.x * Size.y, i =>
  397. {
  398. var ip = pixel[i];
  399. ScreenWhiteTexture[i] += new Geometry.Vector<float>(ip.r, ip.g, ip.b);
  400. });
  401. if (capture == 0)
  402. ScreenLocate.SetScreen(UnityEngine.Color.black);
  403. return true;
  404. }
  405. if (delay != 0)
  406. {
  407. delay--;
  408. return true;
  409. }
  410. if (capture != 0)
  411. {
  412. capture--;
  413. var pixel = cam.GetPixels();
  414. Parallel.For(0, Size.x * Size.y, i =>
  415. {
  416. var ip = pixel[i];
  417. ScreenWhiteTexture[i] -= new Geometry.Vector<float>(ip.r, ip.g, ip.b);
  418. });
  419. if (capture == 0)
  420. {
  421. ScreenLocate.SetScreen(null);
  422. UnityEngine.Color[] newPixel = new UnityEngine.Color[Size.x * Size.y];
  423. Parallel.For(0, Size.x * Size.y, i => {
  424. var pi = ScreenWhiteTexture[i] /= capture;
  425. newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
  426. });
  427. // load saved data
  428. //{
  429. // var fileName = "3.bin";
  430. // ScreenLocateTexture = $"2023 04 16 厦门测试数据/{fileName}".FileReadByte<Vector<float>[]>();
  431. // Debug.Log($"Read {fileName}");
  432. // Parallel.For(0, Size.x * Size.y, i =>
  433. // {
  434. // var pi = ScreenLocateTexture[i];
  435. // newPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
  436. // });
  437. //}
  438. var ScreenLocateTex = new Texture2D(Size.x, Size.y);
  439. ScreenLocateTex.SetPixels(newPixel);
  440. ScreenLocateTex.Apply();
  441. //ScreenLocate.DebugTexture(2, ScreenLocateTex);
  442. var ScreenLocateTexLighted = ScreenLocateTex.AutoLight(10);
  443. //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
  444. //var FileSavePath = Application.persistentDataPath + "/ScreenLocateTexture.bin";
  445. bool Save = ScreenLocate.Main.SaveToggle.isOn;
  446. string time;
  447. if (Save)
  448. {
  449. time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
  450. var FileSavePath = $"屏幕定位数据{time}.bin";
  451. FileSavePath.FileWriteByte(ScreenWhiteTexture);
  452. var bytes = ScreenLocateTexLighted.EncodeToPNG();
  453. File.WriteAllBytes($"屏幕定位数据{time}.png", bytes);
  454. Debug.Log("ScreenLocateTexture Saved To: " + FileSavePath);
  455. }
  456. var ScreenLocateTexR = ScreenLocateTexLighted.ToRGB(ColorChannel.Red);
  457. var ScreenLocateTexG = ScreenLocateTexLighted.ToRGB(ColorChannel.Green);
  458. var ScreenLocateTexB = ScreenLocateTexLighted.ToRGB(ColorChannel.Blue);
  459. ScreenLocate.DebugTexture(2, ScreenLocateTexR);
  460. //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
  461. //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
  462. var watch = new System.Diagnostics.Stopwatch();
  463. watch.Start();
  464. var times = new List<double>() { 0.0 };
  465. var ScreenLocateTexLightedMat = ScreenLocateTexLighted.Too0Mat();
  466. //var ScreenLocateTexLightedMat = texture.Too0Mat();
  467. //var (edge, edgeDir) = ScreenLocateTexLightedMat.IdentifyEdge();
  468. var (edge, edgeDir) = ScreenLocateTexLightedMat.zimIdentifyEdgeGradientAny(15);
  469. //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradient().ToTex());
  470. //ScreenLocate.DebugTexture(4, edge.ToTex());
  471. var quadLines = ScreenLocateTexLightedMat.IdentifyQuadLSD(edge, edgeDir, out List<Line> lightLines, 30);
  472. var drawLineMap = new MatrixF2D(edge..Size.x, edge.Size.y);
  473. int lineCount = 0;
  474. foreach (var l in quadLines)
  475. {
  476. if (l != null)
  477. {
  478. o0Extension.DrawLine(drawLineMap.DrawLine(l, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
  479. lineCount++;
  480. }
  481. }
  482. if (lineCount == 4)
  483. {
  484. var a = quadLines[0].Intersect(quadLines[3], false).Value;
  485. var b = quadLines[0].Intersect(quadLines[1], false).Value;
  486. var c = quadLines[2].Intersect(quadLines[3], false).Value;
  487. var d = quadLines[1].Intersect(quadLines[2], false).Value;
  488. Quad = new Quadrilateral(a, b, c, d);
  489. if (!Quad.IsInScreen(ScreenLocate.Main.WebCamera.Size))
  490. Quad = null;
  491. }
  492. ScreenLocate.Main.ShowScreen(Quad);
  493. //var lines = edge.IdentifyLineLSD(edgeDir, 100);
  494. ////var lines = ScreenLocateTexLightedMat.IdentifyLineLSD();
  495. //var drawLineMap = new MatrixF2D(edge..Size.x, edge.Size.y);
  496. //var returnMaxLines = lines.Sub(0, 10);
  497. //foreach (var (line, sum, gradient) in returnMaxLines)
  498. // o0Extension.DrawLine(drawLineMap.DrawLine(line, (x, y) => 1, new Geometry2D.Float.Vector(0, 10));
  499. ScreenLocate.DebugTexture(3, drawLineMap.ToTex());
  500. //{
  501. // var bytes = drawLineMap.ToTex().EncodeToPNG();
  502. // File.WriteAllBytes($"屏幕定位数据DrawLineMap.png", bytes);
  503. //}
  504. times.Add(watch.ElapsedMilliseconds);
  505. Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
  506. //ScreenLocate.DebugTexture(5, edge.IdentifyLine(edgeDir).ToTex());
  507. //ScreenLocate.DebugTexture(4, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientX().ToTex());
  508. //ScreenLocate.DebugTexture(5, ScreenLocateTexLighted.Too0Mat().IdentifyEdgeGradientY().ToTex());
  509. //var convolutionLighted2 = ScreenLocateTexLighted.Too0Mat().IdentifyEdgeVariance().ToTex();
  510. // OpenCV processing
  511. // zim
  512. {
  513. //var cvLines = edge.cvHoughLinesP();
  514. //ScreenLocate.DebugTexture(5, cvLines);
  515. //var myLines = Hough.Transform(edgeMat);
  516. //var cvLines = edge.cvLine(myLines);
  517. //ScreenLocate.DebugTexture(5, cvLines);
  518. }
  519. UnityEngine.Object.Destroy(ScreenLocateTex);
  520. //ScreenLocate.DebugTexture(4, convolutionLighted2);
  521. }
  522. return true;
  523. }
  524. /*
  525. var avg = new Geometry4D.Vector<float>();
  526. var pixel = texture.GetPixels();
  527. foreach(var i in pixel.Index())
  528. {
  529. var iP = pixel[i];
  530. avg += new Geometry4D.Vector<float>(iP.r, iP.g, iP.b, iP.a);
  531. }
  532. avg /= pixel.Count();
  533. /*
  534. var (texLightedR, texLightedG, texLightedB) = ToRGB(newTex);
  535. ScreenLocate.DebugTexture(3, texLightedR);
  536. ScreenLocate.DebugTexture(4, texLightedG);
  537. ScreenLocate.DebugTexture(5, texLightedB);
  538. //Debug.Log(avg);
  539. return false;
  540. /**/
  541. #endregion
  542. }
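  // Mean brightness of the white-minus-black difference image; CaptureEnd() uses it to rank the four quadrant probes and pick the strongest corner.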
  543. float GetBrightness()
  544. {
  545. UnityEngine.Color[] differPixel = new UnityEngine.Color[Size.x * Size.y];
  546. Parallel.For(0, Size.x * Size.y, i =>
  547. {
  548. var pi = ScreenWhiteTexture[i] - ScreenBlackTexture[i];
  549. differPixel[i] = new UnityEngine.Color(pi.x, pi.y, pi.z);
  550. });
  551. var sum = 0f;
  552. foreach (var i in differPixel)
  553. {
  554. sum += i.Brightness();
  555. }
  556. sum /= differPixel.Length;
  557. //Debug.Log(sum);
  558. return sum;
  559. }
  560. // Convert pixels into the texture used for screen location
  561. Texture2D ToLocateTex(UnityEngine.Color[] pixels)
  562. {
  563. var ScreenLocateTex = new Texture2D(Size.x, Size.y);
  564. ScreenLocateTex.SetPixels(pixels);
  565. ScreenLocateTex.Apply();
  566. //ScreenLocate.DebugTexture(2, ScreenLocateTex);
  567. return ScreenLocateTex.AutoLight(10);
  568. //ScreenLocate.DebugTexture(2, ScreenLocateTexLighted);
  569. //var ScreenLocateTexR = ToLocateTex.ToRGB(ColorChannel.Red);
  570. //var ScreenLocateTexG = ToLocateTex.ToRGB(ColorChannel.Green);
  571. //var ScreenLocateTexB = ToLocateTex.ToRGB(ColorChannel.Blue);
  572. //LocateLightedRedTex = ScreenLocateTexR;
  573. //ScreenLocate.DebugTexture(2, ScreenLocateTexR);
  574. //ScreenLocate.DebugTexture(4, ScreenLocateTexG);
  575. //ScreenLocate.DebugTexture(5, ScreenLocateTexB);
  576. //var ScreenLocateTexLightedMat = texture.Too0Mat();
  577. }
  579. /// <param name="debugImages">If this is not null and not empty, debug mode runs on these images.</param>
  580. void QuadrilateralFit(List<Texture2D> debugImages = null)
  581. {
  582. // Black/white screen differences; holds several batches of images for identification. This list must not be empty
  583. List<UnityEngine.Color[]> PixelsMultipleBatches = new List<UnityEngine.Color[]>();
  584. //var sw = new System.Diagnostics.Stopwatch();
  585. //sw.Start();
  586. // read debug data
  587. if (debugImages != null && debugImages.Count != 0)
  588. {
  589. var dSize = debugImages.First().Size();
  590. foreach (var i in debugImages)
  591. {
  592. Debug.Log($"<color=aqua>Debug {i.name}</color>");
  593. if (i.Size() != dSize)
  594. throw new InvalidOperationException("Multiple Debug textures have different sizes");
  595. PixelsMultipleBatches.Add(i.GetPixels());
  596. }
  597. }
  598. else // compute the screen difference
  599. {
  600. var maxWhite = 0f;
  601. foreach (var i in ScreenWhiteTexture)
  602. {
  603. var m = i.x > i.y ? (i.x > i.z ? i.x : i.z) : (i.y > i.z ? i.y : i.z);
  604. if (maxWhite < m)
  605. maxWhite = m;
  606. }
  607. var scale = 1.0f / maxWhite; // stretch the contrast
  608. var differPixel = new UnityEngine.Color[Size.x * Size.y];
  609. var whitePixel = new UnityEngine.Color[Size.x * Size.y];
  610. Parallel.For(0, Size.x, x =>
  611. {
  612. for (int y = 0; y < Size.y; y++)
  613. {
  614. var i = y * Size.x + x;
  615. var d = ScreenWhiteTexture[i] * scale - ScreenBlackTexture[i];
  616. differPixel[i] = new UnityEngine.Color(d.x, d.y, d.z);
  617. whitePixel[i] = new UnityEngine.Color(ScreenWhiteTexture[i].x, ScreenWhiteTexture[i].y, ScreenWhiteTexture[i].z) * scale;
  618. }
  619. });
  620. PixelsMultipleBatches.Add(differPixel); // difference image
  621. PixelsMultipleBatches.Add(whitePixel); // original (white) image
  622. }
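  // Edge-detection kernel size: an odd value proportional to the image height (roughly 1.4%), floored at 7;
  // detected segments must be at least six kernel widths long, scaled by areaPercent when only part of the screen is lit.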
  623. int conSize = (int)Math.Ceiling(0.007f * Size.y) * 2 + 1;
  624. conSize = Math.Max(conSize, 7); // minimum of 7
  625. float minLength = conSize * 6f;
  626. minLength = locateIndex == -1 ? minLength : minLength * areaPercent; // minLength is scaled down by areaPercent
  627. string log = $"[Log][ScreenLocate Auto] Size: ({Size.x},{Size.y}), 卷积核Size: {conSize}, 最小线段长度: {minLength}";
  628. var allLines = new List<LineIdentified>();
  629. List<Texture2D> LocateTexTemp = new List<Texture2D>();
  630. List<Matrix> ScreenLocateMatList = new List<Matrix>();
  631. foreach (var batch in PixelsMultipleBatches.Index())
  632. {
  633. var locateTex = ToLocateTex(PixelsMultipleBatches[batch]);
  634. LocateTexTemp.Add(locateTex);
  635. var ScreenLocateMat = locateTex.Too0Mat(); // matrix used for extracting the lines
  636. var lineCount = ZIMIdentifyQuadLSD(
  637. ref allLines,
  638. batch,
  639. ScreenLocateMat.zimIdentifyEdgeGradientAny(conSize),
  640. minLength,
  641. new Vector(minLength * 0.4f, conSize * 1.6f));
  642. log += $"\r\n识别图片{batch}, 识别到的线段数量为: {lineCount}";
  643. ScreenLocateMatList.Add(ScreenLocateMat);
  644. }
  645. Texture2D ScreenLocateTexture = LocateTexTemp[0]; // for output
  646. // The matrices produced by LSD are smaller (because of the convolution), so the lines must be shifted back here
  647. // Also: drop line segments whose gradient is below the threshold
  648. float minGradient = 0.08f;
  649. var offset = new Vector((conSize - 1) / 2, (conSize - 1) / 2);
  650. var tempList = new List<LineIdentified>();
  651. for (int i = 0; i < allLines.Count; i++)
  652. {
  653. var l = allLines[i];
  654. if (l.Gradient > minGradient * l.Line.Length)
  655. {
  656. l.Offset(offset);
  657. tempList.Add(l);
  658. }
  659. }
  660. allLines = tempList;
  661. log += $"\r\n根据梯度阈值筛选,最终线段数量为: {allLines.Count}";
  662. // If manual data exists, refresh its Size
  663. QuadManual?.ReSize(new Vector(Size.x, Size.y), ScreenMap.ViewAspectRatioSetting);
  664. // Estimate the screen center: if manual location data exists, average it; otherwise compute it from the color difference (ScreenLocateMatList[0] is the black/white difference by default)
  665. Vector AvgPoint = QuadManual != null ? QuadManual.Quad.Centroid : GetAvgPoint(ScreenLocateMatList[0]);
  666. // Filter down to the four edges of the quadrilateral
  667. var (quadLinesSemiAuto, quadLinesAuto) = FilterLines(
  668. ScreenLocateMatList,
  669. allLines,
  670. AvgPoint,
  671. out LineIdentified[] manualLines,
  672. out List<LineIdentified> possibleLines,
  673. conSize,
  674. minLength);
  675. #region Fully automatic result
  676. List<LineIdentified> LineIdentifiedAuto = new List<LineIdentified>(); // edge order: bottom, right, top, left
  677. for (int i = 0; i < 4; i++)
  678. {
  679. if (quadLinesAuto[i] != null)
  680. LineIdentifiedAuto.Add(quadLinesAuto[i]);
  681. }
  682. if (LineIdentifiedAuto.Count == 4) // check whether the detected edges can form a screen; record it if they can
  683. {
  684. var a = LineIdentifiedAuto[0].Line.Intersect(LineIdentifiedAuto[3].Line, false).Value;
  685. var b = LineIdentifiedAuto[0].Line.Intersect(LineIdentifiedAuto[1].Line, false).Value;
  686. var c = LineIdentifiedAuto[2].Line.Intersect(LineIdentifiedAuto[3].Line, false).Value;
  687. var d = LineIdentifiedAuto[1].Line.Intersect(LineIdentifiedAuto[2].Line, false).Value;
  688. QuadAuto = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
  689. if (!QuadAuto.IsQuadComplete())
  690. QuadAuto = null;
  691. }
  692. #endregion
  693. #region Semi-automatic result
  694. List<LineIdentified> LineIdentifiedSemiAuto = new List<LineIdentified>(); // edge order: bottom, right, top, left
  695. LastQuadSemiAutoState = new bool[4] { true, true, true, true };
  696. for (int i = 0; i < 4; i++)
  697. {
  698. if (quadLinesSemiAuto[i] != null)
  699. LineIdentifiedSemiAuto.Add(quadLinesSemiAuto[i]);
  700. else if (manualLines != null)
  701. {
  702. LineIdentifiedSemiAuto.Add(manualLines[i]);
  703. LastQuadSemiAutoState[i] = false;
  704. }
  705. }
  706. if (LineIdentifiedSemiAuto.Count == 4) // check whether the detected edges can form a screen; record it if they can
  707. {
  708. var a = LineIdentifiedSemiAuto[0].Line.Intersect(LineIdentifiedSemiAuto[3].Line, false).Value;
  709. var b = LineIdentifiedSemiAuto[0].Line.Intersect(LineIdentifiedSemiAuto[1].Line, false).Value;
  710. var c = LineIdentifiedSemiAuto[2].Line.Intersect(LineIdentifiedSemiAuto[3].Line, false).Value;
  711. var d = LineIdentifiedSemiAuto[1].Line.Intersect(LineIdentifiedSemiAuto[2].Line, false).Value;
  712. QuadSemiAuto = new QuadrilateralInCamera(a, b, c, d, new Vector(Size.x, Size.y));
  713. if (!QuadSemiAuto.IsQuadComplete())
  714. QuadSemiAuto = null;
  715. }
  716. #endregion
  717. // Prefer the automatic result (it can also be set manually from outside)
  718. if (QuadSemiAuto == null && QuadAuto == null && Screen.QuadInCamera != null) // fall back to the previous screen if possible
  719. {
  720. Debug.Log($"<color=yellow>[ScreenIdentification] 本次识别失败,回退到上次的识别结果: {Screen.QuadInCamera}</color>");
  721. quadTemp.Add(Screen.QuadInCamera);
  722. }
  723. else if (QuadAuto != null)
  724. {
  725. Debug.Log($"<color=aqua>[ScreenIdentification] 识别到四边形(全自动): {QuadAuto}</color>");
  726. quadTemp.Add(QuadAuto);
  727. }
  728. else if (QuadSemiAuto != null)
  729. {
  730. Debug.Log($"<color=aqua>[ScreenIdentification] 识别到四边形(半自动): {QuadSemiAuto}</color>");
  731. quadTemp.Add(QuadSemiAuto);
  732. }
  733. #region Draw the output textures
  734. // draw the semi-automatic result
  735. var ScreenQuadMap = new Matrix(Size, Tiling: true); // the identified screen quadrilateral (semi-automatic and automatic on one image)
  736. foreach (var i in LineIdentifiedSemiAuto.Index())
  737. {
  738. if (LastQuadSemiAutoState[i])
  739. o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 5, new Geometry2D.Float.Vector(0, 10));
  740. else
  741. o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedSemiAuto[i].DrawLine, (x, y) => 3, new Geometry2D.Float.Vector(0, 6), true);
  742. }
  743. // draw the fully automatic result
  744. foreach (var i in LineIdentifiedAuto.Index())
  745. o0Extension.DrawLine(ScreenQuadMap, LineIdentifiedAuto[i].DrawLine, (x, y) => 4, new Geometry2D.Float.Vector(0, 4), true);
  746. Texture2D ScreenQuad = ScreenQuadMap.ToTexRGBA(FloatValueToColor);
  747. Texture2D ScreenQuadWithScreen = ScreenQuad.Overlay(ScreenLocateTexture); // overlay onto the screen difference image
  748. // draw allLines
  749. var allLinesMap = new Matrix(Size, Tiling: true);
  750. foreach (var l in allLines)
  751. {
  752. if (l.DrawLine != null)
  753. o0Extension.DrawLine(allLinesMap, l.DrawLine, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true);
  754. }
  755. var allLinesTex = allLinesMap.ToTexRGBA(FloatValueToColor);
  756. ScreenLocate.DebugTexture(1, allLinesTex);
  757. // Also output a result image that includes the distracting candidate segments
  758. var ChoosableLineMap = new Matrix(Size, Tiling: true);
  759. foreach (var l in possibleLines)
  760. {
  761. if (l != null && !quadLinesSemiAuto.Contains(l) && !manualLines.Contains(l))
  762. o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 1, new Geometry2D.Float.Vector(0, 2), true); // other candidate segments
  763. }
  764. foreach (var l in LineIdentifiedSemiAuto)
  765. {
  766. if (l != null)
  767. o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 5, new Geometry2D.Float.Vector(0, 5)); // the identified result
  768. }
  769. if (manualLines != null)
  770. {
  771. foreach (var l in manualLines)
  772. o0Extension.DrawLine(ChoosableLineMap, l.DrawLine, (x, y) => 3, new Geometry2D.Float.Vector(0, 2), true); // old screen edges (e.g. from the last manual identification)
  773. }
  774. Texture2D ChoosableLineTex = ChoosableLineMap.ToTexRGBA(FloatValueToColor);
  775. #endregion
  776. log += $"\r\n屏幕四边形_手动识别{QuadManual != null}\r\n屏幕四边形_半自动识别{QuadSemiAuto != null}\r\n屏幕四边形_全自动识别{QuadAuto != null}";
  777. Debug.Log(log);
  778. // Whether to save the images to local disk
  779. if ((ScreenLocate.Main.SaveToggle?.isOn ?? false) && ScreenLocate.Main.DebugOnZIMDemo)
  780. {
  781. var FileDirectory = $"Debug_屏幕定位/";
  782. SaveImages(FileDirectory, log, ScreenLocateTexture, allLinesTex, ChoosableLineTex, ScreenQuad);
  783. }
  784. //times.Add(watch.ElapsedMilliseconds);
  785. //Debug.Log("time: " + (times[times.Count - 1] - times[times.Count - 2]));
  786. // OpenCV processing, zim
  787. {
  788. //var cvLines = edge.cvHoughLinesP();
  789. //ScreenLocate.DebugTexture(5, cvLines);
  790. //var myLines = Hough.Transform(edgeMat);
  791. //var cvLines = edge.cvLine(myLines);
  792. //ScreenLocate.DebugTexture(5, cvLines);
  793. }
  794. {
  795. ScreenLocate.DebugTexture(2, ScreenLocateTexture);
  796. ScreenLocate.DebugTexture(3, ScreenQuad);
  797. ScreenLocate.DebugTexture(4, ScreenQuadWithScreen);
  798. ScreenLocate.DebugTexture(5, ChoosableLineTex);
  799. }
  800. foreach (var i in LocateTexTemp)
  801. {
  802. if (i != ScreenLocateTexture) // ScreenLocateTexture is released by ScreenLocate.DebugTexture
  803. GameObject.Destroy(i);
  804. }
  805. }
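  // Weighted centroid of the difference matrix: each pixel contributes its coordinates weighted by its value, giving an estimate of the screen center in camera space.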
  806. Vector GetAvgPoint(Matrix screenLocateMat)
  807. {
  808. // weighted average
  809. Vector[] avgPointsColumn = new Vector[screenLocateMat.Size.x];
  810. float[] valueSumsColumn = new float[screenLocateMat.Size.x];
  811. Parallel.For(0, screenLocateMat.Size.x, i =>
  812. {
  813. for (int j = 0; j < screenLocateMat.Size.y; j++)
  814. {
  815. var value = screenLocateMat[i, j];
  816. valueSumsColumn[i] += value;
  817. avgPointsColumn[i] += new Vector(i, j) * value;
  818. }
  819. });
  820. Vector avgPoint = Vector.Zero;
  821. var valueSum = 0f;
  822. for (int i = 0; i < screenLocateMat.Size.x; i++)
  823. {
  824. avgPoint += avgPointsColumn[i];
  825. valueSum += valueSumsColumn[i];
  826. }
  827. avgPoint /= valueSum;
  828. return avgPoint;
  829. }
  830. // Returns the number of line segments found; 0 means the search failed
  831. int ZIMIdentifyQuadLSD(ref List<LineIdentified> allLines, int batch, (Matrix edgeMat, Matrix edgeDirMat) edgeGradient,
  832. float minLength, Vector LineCaptureSize)
  833. {
  834. var l = edgeGradient.edgeMat.IdentifyLineLSD(edgeGradient.edgeDirMat, minLength, 25, LineCaptureSize);
  835. if (l == null || l.Count == 0)
  836. return 0;
  837. allLines.AddRange(l.Select((i) => new LineIdentified(batch, i)));
  838. return l.Count;
  839. }
  840. // Returns the four edges of the quadrilateral (semi-automatic, fully automatic). Each list always has length 4 (an entry is null if that edge was not detected), edge order: bottom, right, top, left
  841. (List<LineIdentified>, List<LineIdentified>) FilterLines(List<Matrix> screenLocateMatList, List<LineIdentified> allLines, Vector avgPoint,
  842. out LineIdentified[] manualLines, out List<LineIdentified> possibleLines, float gradientLength, float minLength = 100)
  843. {
  844. // Drop segments outside the elliptical frame (a segment that is more than half outside is dropped)
  845. var innerLines = new List<LineIdentified>();
  846. for (int i = 0; i < allLines.Count; i++)
  847. {
  848. List<Vector> InArea = new List<Vector>();
  849. var dir = (allLines[i].Line.B - allLines[i].Line.A) / 4;
  850. var points = new Vector[5] { allLines[i].Line.A, allLines[i].Line.A + dir, allLines[i].Line.A + dir * 2f, allLines[i].Line.A + dir * 3f, allLines[i].Line.B }; // point A, intermediate points, point B
  851. for (int pI = 0; pI < points.Length; pI++)
  852. {
  853. if (!ScreenLocate.Main.ScreenPixelCheaker.OutArea2D(points[pI], Size))
  854. InArea.Add(points[pI]);
  855. }
  856. if (InArea.Count < 2) // fewer than 2 points inside
  857. continue;
  858. else if (InArea.Count < points.Length) // not entirely inside
  859. allLines[i].DrawLine = new Line(InArea.First(), InArea.Last()); // use the inside portion as the DrawLine for the later drawing step
  860. else // segment entirely inside the ellipse
  861. allLines[i].DrawLine = allLines[i].Line;
  862. innerLines.Add(allLines[i]);
  863. }
  864. // Angle threshold used to decide whether a segment's gradient direction points toward the screen center (avgPoint)
  865. var avaAngleHalf = 75f;
  866. #region Local functions
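  // ScreenGrad samples points along the segment and, at each sample, takes the difference between the matrix values half a kernel width (gradientLength / 2) to either side of the line;
  // the absolute mean of those differences measures the contrast across the line.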
  867. float ScreenGrad(LineIdentified line)
  868. {
  869. var dir = (line.Line.B - line.Line.A).Normalized;
  870. var vertical = new Vector(-dir.y, dir.x) * (gradientLength / 2);
  871. int step = (int)(minLength / 5);
  872. var ll = line.Line.Length;
  873. var lg = new List<float>();
  874. for (int i = 0; i <= ll; i += step)
  875. {
  876. var point = line.Line.A + dir * i;
  877. var ga = point + vertical;
  878. var gb = point - vertical;
  879. lg.Add(screenLocateMatList[line.Batch][(int)ga.x, (int)ga.y] - screenLocateMatList[line.Batch][(int)gb.x, (int)gb.y]);
  880. }
  881. return Math.Abs(lg.Mean());
  882. }
  883. // Combined gradient along the line (gradient times a length factor times a distance factor); distanceRatio is the actual distance divided by the maximum distance
  884. float estimateGradient(LineIdentified line, float distanceRatio)
  885. {
  886. var gM = ScreenGrad(line);
  887. float e = (float)Math.Sqrt(Math.Ceiling(line.Line.Length / minLength)); // length factor: longer segments with larger gradients are preferred in the filtering
  888. float d = (5 - distanceRatio) / 4; // distance factor: the closer the segment, the larger the factor
  889. line.ZIMGradient = e * d * gM; // store the combined gradient; the added fully automatic step reuses it
  890. return line.ZIMGradient;
  891. }
  892. // Decide from the gradient angle whether a segment is a screen edge; index identifies which edge (order: bottom, right, top, left)
  893. void GetScreenLineIndex(LineIdentified line)
  894. {
  895. var a = (avgPoint - (line.Line.A + line.Line.B) / 2).DegreeToXAxis();
  896. //Debug.Log(a + ", " + gradient + ", " + sum);
  897. var index = -1;
  898. if (Math.Abs(a - line.GradientDegree) < avaAngleHalf || Math.Abs(a - 360 - line.GradientDegree) < avaAngleHalf || Math.Abs(a + 360 - line.GradientDegree) < avaAngleHalf)
  899. {
  900. if (line.GradientDegree > 45 && line.GradientDegree < 135) // bottom
  901. index = 0;
  902. else if (line.GradientDegree > 135 && line.GradientDegree < 225) // right
  903. index = 1;
  904. else if (line.GradientDegree > 225 && line.GradientDegree < 315) // top
  905. index = 2;
  906. else // left
  907. index = 3;
  908. }
  909. line.ScreenLineIndex = index;
  910. }
  911. #endregion
  912. // Use the gradient direction to decide which edge each segment belongs to
  913. foreach (var l in innerLines)
  914. GetScreenLineIndex(l);
  915. // bottom, right, top, left; semi-automatic and automatic
  916. var quadLinesSemiAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
  917. var quadLinesAuto = new List<(float, LineIdentified)>[4] { new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>(), new List<(float, LineIdentified)>() };
  918. possibleLines = new List<LineIdentified>();
  919. // If manual location data already exists, filter the segments against it (semi-automatic)
  920. manualLines = null;
  921. if (QuadManual != null)
  922. {
  923. Debug.Log($"[IdentifyLineLSD] 根据已有定位数据做筛选, QuadManual: {QuadManual}");
  924. manualLines = QuadManual.GetLines().Select((i) => new LineIdentified(0, i, 0, 0, true)).ToArray();
  925. var calibration = ScreenLocate.Main.ReDoLocateCalibrationRatio * Size.y;
  926. var avgPointCross = manualLines.Select((i) => i.Line.LineCrossWithPoint(avgPoint)).ToArray(); // cross value relative to the average point
  927. var avgPointPedal = manualLines.Select((i) => o0Extension.PointPedal(i.Line, avgPoint, out _)).ToArray(); // feet of the perpendiculars for the current location: bottom, right, top, left
  928. foreach (var line in innerLines)
  929. {
  930. // Filter conditions: 1 - the gradient direction matches; 2 - the perpendicular feet are close enough; 3 - both endpoints A and B of the segment lie outside the old edge; 4 - the foot of the perpendicular from the new segment's midpoint onto the old edge must fall within the old segment
  931. if (line.ScreenLineIndex >= 0)
  932. {
  933. var distanceToOld = (o0Extension.PointPedal(line.Line, avgPoint, out _) - avgPointPedal[line.ScreenLineIndex]).Length;
  934. if (distanceToOld < calibration &&
  935. manualLines[line.ScreenLineIndex].Line.LineCrossWithPoint(line.Line.A) * avgPointCross[line.ScreenLineIndex] <= 0 &&
  936. manualLines[line.ScreenLineIndex].Line.LineCrossWithPoint(line.Line.B) * avgPointCross[line.ScreenLineIndex] <= 0)
  937. {
  938. var middleToOldLine = o0Extension.PointPedal(manualLines[line.ScreenLineIndex].Line, (line.Line.A + line.Line.B) / 2, out bool inLineSegment);
  939. if (inLineSegment)
  940. {
  941. quadLinesSemiAuto[line.ScreenLineIndex].Add((estimateGradient(line, distanceToOld / calibration), line));
  942. possibleLines.Add(line);
  943. }
  944. }
  945. }
  946. }
  947. }
  948. // fully automatic
  949. foreach (var line in innerLines)
  950. {
  951. if (line.ScreenLineIndex >= 0 && line.Batch < 1) // the fully automatic pass only uses the first image, which by default is the difference image
  952. {
  953. quadLinesAuto[line.ScreenLineIndex].Add((estimateGradient(line, 1), line));
  954. }
  955. }
  956. // Collect the semi-automatic and fully automatic results
  957. var resultSemiAuto = new LineIdentified[4];
  958. var resultAuto = new LineIdentified[4];
  959. var resultAutoPedal = new Vector[4]; // used to find parallel lines for the fully automatic pass
  960. for (int i = 0; i < 4; i++)
  961. {
  962. if (quadLinesSemiAuto[i].Count > 0)
  963. resultSemiAuto[i] = quadLinesSemiAuto[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
  964. if (quadLinesAuto[i].Count > 0)
  965. {
  966. resultAuto[i] = quadLinesAuto[i].Max((a, b) => a.Item1.CompareTo(b.Item1)).Item2;
  967. if (resultAuto[i] != null)
  968. resultAutoPedal[i] = o0Extension.PointPedal(resultAuto[i].Line, avgPoint, out _);
  969. }
  970. }
  971. // Added step (fully automatic, to handle black screen borders): look for lines parallel to resultAuto and decide whether to replace an edge (1 - lies inside resultAuto, 2 - nearly parallel, 3 - LineGuess confirms it is a line)
  972. // The semi-automatic pass does not add this step; it handles black borders by enlarging the distance threshold to the manual data instead
  973. var interSelectable = new List<LineIdentified>();
  974. foreach (var line in innerLines)
  975. {
  976. if (line.ScreenLineIndex >= 0 && line.Batch < 1)
  977. {
  978. if (line != resultAuto[line.ScreenLineIndex])
  979. {
  980. var pedal = o0Extension.PointPedal(line.Line, avgPoint, out _);
  981. var a0 = pedal - avgPoint;
  982. var a0L = a0.Length;
  983. line.DistanceToMiddle = a0L;
  984. var a1 = resultAutoPedal[line.ScreenLineIndex] - avgPoint;
  985. var a1L = a1.Length;
  986. if (a0L < a1L)
  987. {
  988. var dotN = a0.Dot(a1) / a0L / a1L;
  989. if (Math.Abs(dotN - 1) < 0.001) // nearly parallel is enough
  990. interSelectable.Add(line);
  991. }
  992. }
  993. }
  994. }
  995. var (interA, interB) = FindInterLinePair(new LineGuess(screenLocateMatList, gradientLength * 2, minLength), interSelectable, 12);
  996. if (interA != null) // replace part of the edges filtered in the previous step to obtain the final result
  997. resultAuto[interA.ScreenLineIndex] = interA;
  998. if (interB != null)
  999. resultAuto[interB.ScreenLineIndex] = interB;
  1000. return (resultSemiAuto.ToList(), resultAuto.ToList());
  1001. }
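  // From the candidates, sorted by descending ZIMGradient, keep for each edge index the accepted line closest to the screen center (at most maxCountToSelect candidates are considered);
  // prefer returning an opposite pair (bottom/top or left/right), otherwise fall back to the single strongest line.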
  1002. (LineIdentified a, LineIdentified b) FindInterLinePair(LineGuess lineGuess, List<LineIdentified> interSelectable, int maxCountToSelect = 12)
  1003. {
  1004. Debug.Log("[ScreenIdentification] selectable inter line count: " + interSelectable.Count);
  1005. interSelectable.Sort((a, b) => b.ZIMGradient.CompareTo(a.ZIMGradient));
  1006. int count = 0;
  1007. LineIdentified[] selected = new LineIdentified[4];
  1008. foreach (var line in interSelectable)
  1009. {
  1010. if (lineGuess.GuessIsLine(line)) // check whether it really is a line, and keep the one closest to the center point
  1011. {
  1012. Debug.Log("[ScreenIdentification] guess is line: (index)" + line.ScreenLineIndex);
  1013. if (selected[line.ScreenLineIndex] == null || selected[line.ScreenLineIndex].DistanceToMiddle > line.DistanceToMiddle)
  1014. selected[line.ScreenLineIndex] = line;
  1015. }
  1016. if (count++ >= maxCountToSelect)
  1017. break;
  1018. }
  1019. var selectedList = new List<LineIdentified>();
  1020. foreach (var i in selected)
  1021. {
  1022. if (i != null)
  1023. selectedList.Add(i);
  1024. }
  1025. if (selectedList.Count == 4)
  1026. {
  1027. if (selected[0].ZIMGradient + selected[2].ZIMGradient > selected[1].ZIMGradient + selected[3].ZIMGradient)
  1028. return (selected[0], selected[2]);
  1029. else
  1030. return (selected[1], selected[3]);
  1031. }
  1032. else if (selected[0] != null && selected[2] != null)
  1033. return (selected[0], selected[2]);
  1034. else if (selected[1] != null && selected[3] != null)
  1035. return (selected[1], selected[3]);
  1036. else if (selectedList.Count == 2)
  1037. return selectedList[0].ZIMGradient > selectedList[1].ZIMGradient ? (selectedList[0], null) : (selectedList[1], null);
  1038. else if (selectedList.Count == 1)
  1039. return (selectedList[0], null);
  1040. else
  1041. return (null, null);
  1042. }
  1043. void SaveImages(string FileDirectory, string log,
  1044. Texture2D ScreenLocateTex, Texture2D allLinesTex, Texture2D ChoosableLineTex, Texture2D ScreenQuadTex)
  1045. {
  1046. if (!Directory.Exists(FileDirectory))
  1047. Directory.CreateDirectory(FileDirectory);
  1048. var time = DateTime.Now.ToString("yyyyMMdd_HHmmss");
  1049. var pngData = ScreenLocate.Main.OutputTextures[7]?.EncodeToPNG();
  1050. if (pngData != null)
  1051. File.WriteAllBytes($"{FileDirectory}{time}A屏幕原图.png", pngData);
  1052. var pngData1 = ScreenLocateTex?.EncodeToPNG();
  1053. if (pngData1 != null)
  1054. File.WriteAllBytes($"{FileDirectory}{time}B黑白色差.png", pngData1);
  1055. var pngData2 = allLinesTex?.EncodeToPNG();
  1056. if (pngData2 != null)
  1057. File.WriteAllBytes($"{FileDirectory}{time}C全部识别线段_半自动.png", pngData2);
  1058. var pngData3 = ChoosableLineTex?.EncodeToPNG();
  1059. if (pngData3 != null)
  1060. File.WriteAllBytes($"{FileDirectory}{time}D备选线段_半自动.png", pngData3);
  1061. var pngData4 = ScreenQuadTex?.EncodeToPNG();
  1062. if (pngData4 != null)
  1063. File.WriteAllBytes($"{FileDirectory}{time}E识别结果.png", pngData4);
  1064. Debug.Log($"<color=aqua>({time}) 屏幕识别图片保存至:程序根目录/{FileDirectory}</color>");
  1065. log +=
  1066. $"\r\n屏幕原图保存{pngData != null}, " +
  1067. $"\r\n黑白色差保存{pngData1 != null}, " +
  1068. $"\r\n全部识别线段(半自动)保存{pngData2 != null}, " +
  1069. $"\r\n备选线段(半自动)保存{pngData3 != null}, " +
  1070. $"\r\n识别结果保存{pngData4 != null}";
  1071. File.WriteAllText($"{FileDirectory}{time}屏幕自动定位_日志.log", log);
  1072. }
  1073. }
  1074. }