Coverage for bim2sim/plugins/PluginOpenFOAM/bim2sim_openfoam/utils/evaluateCheckMesh.py: 0%
192 statements
import json
import os
import re
from collections import OrderedDict
from pathlib import Path

import pandas as pd
def set_in_nested_dict(nested, keys, value):
    """Sets a value in a nested OrderedDict based on a list of keys."""
    for key in keys[:-1]:
        nested = nested.setdefault(key, OrderedDict())
    nested[keys[-1]] = value
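# Minimal illustration of set_in_nested_dict (keys and value are made up):
# >>> d = OrderedDict()
# >>> set_in_nested_dict(d, ["Checking geometry...", "Total volume"], "42.0")
# >>> d["Checking geometry..."]["Total volume"]
# '42.0'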
def process_key_value(line):
    """Splits a line into key and value."""
    match = re.match(r"(.*?)\s*[:=]\s*(.*)", line)
    if match:
        key, value = match.groups()
        return key.strip(), value.strip()
    return None, None
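# Minimal illustration of process_key_value; the log lines are made-up examples:
# >>> process_key_value("nProcs : 4")
# ('nProcs', '4')
# >>> process_key_value("Checking topology...")
# (None, None)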
def parse_checkMesh_log(file_path):
    def process_table(lines, headers=None):
        """Processes a table into nested dictionaries."""
        rows = [re.split(r"\s{2,}", line) for line in lines]
        if headers:
            header_parts = re.split(r"\s{3,}", headers)
        table_data = OrderedDict()
        if headers and rows and len(header_parts) == len(rows[0]) \
                and len(header_parts) > 2:
            for row in rows:
                row_key = row[0]
                row_values = OrderedDict(
                    (header_parts[i], row[i]) for i in range(1, len(row)))
                table_data[row_key] = row_values
        else:
            if headers and len(header_parts) > 1:
                table_data[header_parts[0]] = header_parts[1]
            for row in rows:
                if len(row) == 2:  # Key-value pair table
                    table_data[row[0]] = int(row[1]) if row[1].isdigit() \
                        else row[1]
        return table_data
    def parse_irregular_geometry(lines):
        """Parses irregular 'Checking geometry...' section lines."""
        result = OrderedDict()
        for line in lines:
            new_lines = line.split(". ")
            for nl in new_lines:
                match = re.match(r"(.*?)(?:\s*=\s*(.*?))?\s*(OK\.)?$", nl)
                if match:
                    key, value, status = match.groups()
                    key = key.strip()
                    subkey_match = re.match(r"(.*)\s+\((.*?)\)", key)
                    if subkey_match:
                        main_key, subkey = subkey_match.groups()
                        result.setdefault(main_key.strip(), OrderedDict())[
                            subkey.strip()] = value or status
                    elif status:
                        result[key] = {
                            "value": value.strip() if value else None,
                            "status": status}
                    elif value:
                        result[key] = float(value) \
                            if value.replace('.', '', 1).isdigit() else value
                    else:
                        result[key] = None
        return result
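    # Illustration (made-up line): "Max aspect ratio = 5.0 OK." would be stored
    # as {"Max aspect ratio": {"value": "5.0", "status": "OK."}}, while a key
    # carrying a "(...)" suffix becomes a nested sub-dictionary.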
    data = OrderedDict()
    current_path = []
    table_buffer = []
    processing_table = False
    in_geometry_section = False
    geometry_lines = []
    last_line = ""
    table_done = False
    keep_key = None

    try:
        with open(file_path, "r") as f:
            lines = f.readlines()
    except FileNotFoundError:
        print(f"File {file_path} not found.")
        return
    for line in lines:
        line = line.rstrip()

        if not line or line.startswith(("//", "/*", "\\", "*", "|")):
            if table_buffer:
                table_done = True
            else:
                continue

        # Store the last line before "End" as "Mesh Quality Result"
        if line == "End":
            if last_line:
                set_in_nested_dict(data, ["Mesh Quality Result"], last_line)
            break
        last_line = line

        if "Checking geometry..." in line:
            in_geometry_section = True
            current_path = [line]
            continue

        if in_geometry_section:
            if re.match(r"^\s", line):
                # Collect indented lines in geometry section
                geometry_lines.append(line.strip())
                continue
            else:  # End of geometry section
                in_geometry_section = False
                geometry_dict = parse_irregular_geometry(geometry_lines)
                set_in_nested_dict(data, current_path, geometry_dict)
                geometry_lines = []

        # End of a table
        if processing_table and not re.match(r"^\s", line):
            if table_buffer:
                headers = table_buffer.pop(0) if len(
                    table_buffer[0].split()) > 2 else None
                table_dict = process_table(table_buffer, headers)
                if keep_key:
                    set_in_nested_dict(data, current_path + [keep_key],
                                       table_dict)
                    keep_key = None
                else:
                    set_in_nested_dict(data, current_path, table_dict)
                table_buffer = []
            processing_table = False

        if not line.startswith(" "):
            key, value = process_key_value(line)
            if not value:
                current_path = [line]
                continue

        key, value = process_key_value(line)
        if key and value:
            set_in_nested_dict(data, current_path + [key], value)
            continue
        elif key:
            keep_key = key
            continue

        if line.startswith(" "):
            processing_table = True
            table_done = False
            table_buffer.append(line.strip())

        if table_buffer and table_done:
            headers = table_buffer.pop(0) if len(
                table_buffer[0].split()) > 2 else None
            table_dict = process_table(table_buffer, headers)
            set_in_nested_dict(data, current_path, table_dict)
            table_done = False

    if geometry_lines:
        geometry_dict = parse_irregular_geometry(geometry_lines)
        set_in_nested_dict(data, current_path, geometry_dict)

    return data
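# Sketch of standalone usage (path and keys mirror the __main__ block below;
# actual section names depend on the checkMesh log at hand):
#   stats = parse_checkMesh_log(Path("OpenFOAM/logCheckMesh.compress"))
#   n_points = stats["Mesh stats"]["points"]
#   total_volume = stats["Checking geometry..."]["Total volume"]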
def parse_snappyHexMeshLog(file_path):
    data = OrderedDict()
    last_line = ""
    current_path = []

    try:
        with open(file_path, "r") as f:
            lines = f.readlines()
    except FileNotFoundError:
        print(f"File {file_path} not found.")
        return

    for line in lines:
        line = line.rstrip()

        if not line or line.startswith(("//", "/*", "\\", "*", "|")):
            continue

        # Store the runtime reported on the last line before "End" as
        # "Total time in seconds"
        if line == "End":
            if last_line:
                key, value = process_key_value(last_line)
                set_in_nested_dict(data, ["Total time in seconds"],
                                   value.replace(' s.', ''))
            break
        last_line = line

        if not line.startswith(" "):
            key, value = process_key_value(line)
            if not value:
                continue
            if key and value:
                set_in_nested_dict(data, current_path + [key], value)

    return data
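# Sketch of standalone usage (file name as in the __main__ block below):
#   shm = parse_snappyHexMeshLog(Path("OpenFOAM/log.compress"))
#   shm["nProcs"], shm["Total time in seconds"]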
if __name__ == '__main__':
    directory = Path(r'C:\Users\richter\Documents\CFD-Data\PluginTests')
    global_eval_df = pd.DataFrame()
    comparative_results = pd.DataFrame()
    final_dir = Path()
    for diss_dir in directory.glob(r'grid_conv_1o1p\P1\bm*'):
        # Check if "OpenFOAM" subdirectory exists within the current directory
        openfoam_dir = diss_dir / 'OpenFOAM'
        if openfoam_dir.is_dir():
            parsed_data = parse_checkMesh_log(
                openfoam_dir / 'logCheckMesh.compress')
            parsed_data2 = parse_snappyHexMeshLog(
                openfoam_dir / 'log.compress')

            if parsed_data and parsed_data2:
                parsed_data.update({'nProcs': parsed_data2['nProcs']})
                parsed_data.update(
                    {'TotalTime': parsed_data2['Total time in seconds']})
                eval_dict = {}
                eval_dict.update(parsed_data['Mesh stats'])
                eval_dict.update({'TotalTime': parsed_data['TotalTime']})
                eval_dict.update({'nProcs': parsed_data['nProcs']})
                eval_dict.update(
                    {'TotalVolume':
                         parsed_data['Checking geometry...']['Total volume']})
                eval_dict.update(
                    {'MinVolume':
                         parsed_data['Checking geometry...']['Min volume']})
                eval_dict.update(
                    {'MaxVolume':
                         parsed_data['Checking geometry...']['Max volume']})
                eval_dict.update(
                    {'MinFaceArea':
                         parsed_data['Checking geometry...'][
                             'Minimum face area']})
                eval_dict.update(
                    {'MaxFaceArea':
                         parsed_data['Checking geometry...'][
                             'Maximum face area']})
                eval_dict.update(
                    {'BlockMeshSize': float("0." + diss_dir.name[2:4])})
                eval_mesh_df = pd.DataFrame()
                eval_mesh_df[diss_dir.name] = eval_dict
                eval_mesh_df = eval_mesh_df.apply(pd.to_numeric)
                result = pd.DataFrame()
                for col in global_eval_df.columns:
                    result[f"{col}/{eval_mesh_df.columns[0]}"] = (
                        global_eval_df[col]
                        / eval_mesh_df[eval_mesh_df.columns[0]])
                comparative_results = pd.concat(
                    [comparative_results, result], axis=1)
                global_eval_df = pd.concat(
                    [global_eval_df, eval_mesh_df], axis=1)
                print(comparative_results)
                with open(openfoam_dir / 'mesh.json', 'w',
                          encoding='utf-8') as f:
                    json.dump(parsed_data, f, ensure_ascii=True, indent=4)
                # print(json.dumps(parsed_data, indent=4))
                final_dir = diss_dir
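    # 'reff' is read here as an effective refinement ratio between two meshes:
    # the cube root of their point-count ratio (interpretation, not stated in
    # the logs themselves).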
    comparative_results.loc['reff'] = \
        comparative_results.loc['points'] ** (1 / 3)
    print(comparative_results)
    comparative_results.to_csv(final_dir.parent / 'comparative_result.csv')
    print('DONE')