# Import libraries
import sys
import os
import json
from functools import reduce

import numpy as np
import pandas as pd
import h5py
import matplotlib.pylab as plt
from tqdm import tqdm
import trimesh

from pyevtk.hl import pointsToVTK
from pyevtk.hl import gridToVTK  # , pointsToVTKAsTIN

from scipy.spatial import Delaunay
from sklearn.decomposition import PCA
from sklearn.cluster import DBSCAN
from sklearn.cluster import KMeans
from sklearn.mixture import GaussianMixture
from sklearn.metrics import silhouette_score
from sklearn.metrics import homogeneity_score
# import plotly.graph_objects as go


def centeroidnp(data_frame):
    """Return the (x, y, z) centroid of a DataFrame with 'x', 'y', 'z' columns."""
    length = len(data_frame['x'])
    sum_x = np.sum(data_frame['x'])
    sum_y = np.sum(data_frame['y'])
    sum_z = np.sum(data_frame['z'])
    return sum_x / length, sum_y / length, sum_z / length


def centeroid_df(data_frame):
    """Identical to centeroidnp; kept for compatibility with existing callers."""
    length = len(data_frame['x'])
    sum_x = np.sum(data_frame['x'])
    sum_y = np.sum(data_frame['y'])
    sum_z = np.sum(data_frame['z'])
    return sum_x / length, sum_y / length, sum_z / length


def centeroid_np_array(array):
    """Return the (x, y, z) centroid of an (N, 3) NumPy array."""
    length = len(array[:, 0])
    sum_x = np.sum(array[:, 0])
    sum_y = np.sum(array[:, 1])
    sum_z = np.sum(array[:, 2])
    return sum_x / length, sum_y / length, sum_z / length


def correcct_indx(array_wrong, reference):
    """Map each node ID in array_wrong to its index within the sorted reference array."""
    arr_w = array_wrong.flatten()
    for i in range(len(arr_w)):
        # np.argwhere returns an array; take the single matching index explicitly
        arr_w[i] = np.argwhere(reference == arr_w[i]).flatten()[0]

    return arr_w.reshape(len(array_wrong), 3)


def magn_fac(d, x1, y1, z1, x2, y2, z2):
    """Scaling factors +/-t such that t * (x2, y2, z2) has length d.

    The point coordinates (x1, y1, z1) are accepted for symmetry with
    cyl_endPoints but are not used here.
    """
    A = (x2) ** 2
    B = (y2) ** 2
    C = (z2) ** 2
    t = np.sqrt((d ** 2) / (A + B + C))
    return t, -t


def cyl_endPoints(d, x1, x2, y1, y2, z1, z2):
    """End points of a segment of half-length d centred at (x1, y1, z1)
    along the direction (x2, y2, z2)."""
    t1, t2 = magn_fac(d, x1, y1, z1, x2, y2, z2)

    U1 = x1 + (x2) * t1
    V1 = y1 + (y2) * t1
    W1 = z1 + (z2) * t1

    U2 = x1 + (x2) * t2
    V2 = y1 + (y2) * t2
    W2 = z1 + (z2) * t2

    return [U1, V1, W1, U2, V2, W2]
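
# Example (illustrative values only): a segment of half-length 15 along +z,
# centred at the origin, returned as [x+, y+, z+, x-, y-, z-]:
#   cyl_endPoints(15.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0) -> [0.0, 0.0, 15.0, 0.0, 0.0, -15.0]
# Note the interleaved argument order: (d, x1, x2, y1, y2, z1, z2).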


def plot_edit_mesh(nodes, simplices, simplices_edit, key):
    """Re-index the kept triangles so they reference a compact node list.

    Returns the selected nodes (padded with a zero z-column) and the faces
    rewritten as indices into that node list.
    """
    print("**Out mesh for plotting**")
    a = simplices[simplices_edit].flatten()
    ordered, index_or, inverse_or = np.unique(a, return_index=True, return_inverse=True)
    ordered_corr = np.arange(len(ordered))
    # node IDs in order of first appearance
    sorted_id = a[np.sort(index_or)]
    edited_a = np.zeros(len(a), dtype="int")
    for i in range(len(sorted_id)):
        for j in range(len(a)):
            if sorted_id[i] == a[j]:
                edited_a[j] = i

    nodes_selected = nodes[sorted_id]
    nodes_selected_edit_zeros = np.hstack((nodes_selected, (np.zeros(len(nodes_selected)).reshape(-1, 1))))
    mesh_tr = trimesh.Trimesh(vertices=nodes_selected_edit_zeros, faces=edited_a.reshape(simplices[simplices_edit].shape))
    # mesh_tr.export(f'/u/gazal/APT_SiGe/output/output_voxels/mesh{key}.stl');
    return nodes_selected_edit_zeros, edited_a.reshape(simplices[simplices_edit].shape)
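

# The nested loop in plot_edit_mesh re-labels node IDs by order of first
# appearance. A hypothetical vectorized equivalent (not called anywhere in this
# file) is sketched below; it assumes the same first-appearance ordering and
# relies only on np.unique / np.argsort.
def reindex_first_appearance(a):
    """Map each entry of `a` to the rank of its value in first-appearance order."""
    ordered, index_or, inverse_or = np.unique(a, return_index=True, return_inverse=True)
    first_order = np.argsort(np.argsort(index_or))  # rank of each unique value by first appearance
    return first_order[inverse_or]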


def plot_stl(nodes, simplices, key):
    """Export the mesh for one cluster as an STL file."""
    mesh_tr = trimesh.Trimesh(vertices=nodes, faces=simplices)
    mesh_tr.export(f'/u/gazal/APT_SiGe/output/output_voxels/mesh{key}.stl')


def meshes_normals(input_file, output_file, grid_size, Normal_end_length):
    """
    Read the DBSCAN cluster data from `input_file`, fit a triangulated surface
    to each selected cluster, and write nodes, simplices, averaged node normals,
    and normal end points to `output_file` (HDF5).
    """
    Clusters = {}
    with h5py.File(input_file, "r") as hdfr:
        group1 = hdfr.get("1")
        No_clusters = list(group1.keys())  # list(list(group1.attrs.values())[1])
        for i in No_clusters:
            Clusters[i] = np.array(group1.get(i))

    # Collect the cluster coordinates into one DataFrame per cluster
    cluster_combine_lst = []
    for i in Clusters.keys():
        print(i)
        image = Clusters[i][:, 0:3]

        df = pd.DataFrame(image, columns=("x", "y", "z"))
        df["ID"] = [int(i)] * len(image)
        cluster_combine_lst.append(df)

    dist_cut = 14
    Node_list_plot = []
    no_elements = grid_size  # e.g. 4 nm
    # Normal_end_length: half-length of the normal segments, e.g. 15 nm
    nodes_edit_lst = []
    simp_plot_edit_lst = []

    keys = [1, 2, 3]  # 0,7

    with h5py.File(output_file, "w") as hdfw:
        G_normal_avg_vec = hdfw.create_group("normal_vec")
        G_nodes_edit_z = hdfw.create_group("nodes")
        G_simplices = hdfw.create_group("simplices")

        G_normal_ends = hdfw.create_group("normal_ends")
        for key in keys:
            print("key = ", key)
            ## Load the precipitate centroids
            # gr = hdfr.get("{}".format(key))
            # vox_ratio_preci_col = list(list(gr.attrs.values())[3])
            # Preci_cent_col = list(list(gr.attrs.values())[2])
            # cent = np.array(gr.get("0"))
            # print("Centroids = ", len(cent))
            Df_cent = cluster_combine_lst[key]

            ## PCA on centroid values
            Data = Df_cent.drop(['ID'], axis=1)
            pca = PCA(n_components=3)
            fit_pca = pca.fit(Data.values)
            X_pca = pca.transform(Data.values)

            ## Define the boundaries of the PCA centroid data
            # nodes = np.array([[max(X_pca[:, 0]), max(X_pca[:, 1])], [max(X_pca[:, 0]), min(X_pca[:, 1])],
            #                   [min(X_pca[:, 0]), max(X_pca[:, 1])], [min(X_pca[:, 0]), min(X_pca[:, 1])]])

            ## Set a 2D regular grid on the PCA centroids
            # no_elements = 50  # defined above

            nodes = []
            for i in np.arange(min(X_pca[:, 0]), max(X_pca[:, 0]), no_elements):
                for j in np.arange(min(X_pca[:, 1]), max(X_pca[:, 1]), no_elements):
                    nodes.append([i, j])
            nodes = np.asarray(nodes)

            print(nodes[0, 1] - nodes[1, 1])
            print((min(X_pca[:, 0]) - max(X_pca[:, 0])) / no_elements)
            print((min(X_pca[:, 1]) - max(X_pca[:, 1])) / no_elements)

            ## Triangulate the grid
            tri = Delaunay(nodes)
180
+
181
+ ## Remove the empty grid triangles
182
+ atoms_xy = np .delete (X_pca , 2 , 1 )
183
+ P = atoms_xy
184
+ check = []
185
+ for i in tqdm (range (len (tri .simplices ))):
186
+ #print(i)
187
+ vertices = nodes [tri .simplices [i ]]
188
+ A = vertices [0 ]
189
+ B = vertices [1 ]
190
+ C = vertices [2 ]
191
+
192
+ AB = B - A
193
+ BC = C - B
194
+ CA = A - C
195
+ AP = P - A
196
+ BP = P - B
197
+ CP = P - C
198
+
199
+
200
+ a = np .cross (AB ,AP )
201
+ b = np .cross (BC ,BP )
202
+ c = np .cross (CA ,CP )
203
+
204
+ a1 = np .where (a < 0 , - 2 , 1 )
205
+ #afinal= np.where(a1 >=0, 1, a1)
206
+
207
+ b1 = np .where (b < 0 , - 2 , 1 )
208
+ #bfinal= np.where(b1 >=0, 1, b1)
209
+
210
+ c1 = np .where (c < 0 , - 2 , 1 )
211
+ #cfinal= np.where(c1 >=0, 1, c1)
212
+
213
+ product = a1 * b1 * c1
214
+
215
+ if len (np .argwhere (product == 1 )) != 0 :
216
+ check .append (i )
217
+ #print("1")
218
+ elif len (np .argwhere (product == - 8 )) != 0 :
219
+ check .append (i )
220
+ #print("-8")
221
+
222
+ #print(tri.simplices)
223
+ #G_normal_2d.create_dataset("{}".format(int(key)), data = [nodes, tri.simplices, check])
224
+
            simplices = tri.simplices
            simplices_edit = check

            plot2 = nodes[np.unique(simplices[simplices_edit].flatten())]
            nodes_edit = np.concatenate((plot2, np.atleast_2d(np.zeros((1, len(plot2)))).T), axis=1)

            # Re-index the kept triangles against a compact node list
            nodes_edit, simp_plot_edit = plot_edit_mesh(nodes, simplices, simplices_edit, key)
            plot2 = nodes_edit[:, 0:2]

            ## Define the depth at each node: shift it to the centroid of the
            ## atoms that fall inside a +/- dist_cut window around the node
            for i in tqdm(range(len(nodes_edit))):

                center = plot2[i]
                shift_org_x = X_pca[:, 0] - plot2[i][0]
                shift_org_y = X_pca[:, 1] - plot2[i][1]

                a = np.argwhere(shift_org_x < dist_cut)
                b = np.argwhere(shift_org_y < dist_cut)
                c = np.argwhere(shift_org_x > -dist_cut)
                d = np.argwhere(shift_org_y > -dist_cut)

                k = reduce(np.intersect1d, (a, b, c, d))
                if len(k) == 0:
                    continue
                slice_pts = np.take(X_pca, k, axis=0)
                centroid = centeroidnp(pd.DataFrame(slice_pts, columns=['x', 'y', 'z']))
                nodes_edit[i] = centroid

            ## Take the PCA inverse of the edited nodes
            nodes_edit_Df = pd.DataFrame(data=nodes_edit, columns=["x", "y", "z"])
            nodes_inv = pd.DataFrame(data=pca.inverse_transform(nodes_edit_Df.values), columns=["x", "y", "z"])
            nodes_edit = nodes_inv.values
            Node_list_plot.append(nodes_edit)
            print("key = **************", key)

            G_nodes_edit_z.create_dataset("{}".format(int(key)), data=nodes_edit)
            G_simplices.create_dataset("{}".format(int(key)), data=simp_plot_edit)
            plot_stl(nodes_edit, simp_plot_edit, key)

            ## Find the normal at each triangle, then average per node
            sim_edit_node = np.unique(simplices[simplices_edit].flatten())

            normal_avg_lst = []

            for node in tqdm(range(len(nodes_edit))):

                # all triangles that contain this node
                patch = simplices[np.argwhere(simplices == sim_edit_node[node])[:, 0]]

                # drop triangles that reference nodes outside the kept set
                delete_it_lst = []
                for i in patch.flatten():
                    if i not in sim_edit_node:
                        delet_it = np.argwhere(patch == i)[:, 0]
                        delete_it_lst.append(delet_it.flatten().tolist())

                flat_list = [item for sublist in delete_it_lst for item in sublist]
                unique_flat_lst = np.unique(np.array(flat_list))

                if len(unique_flat_lst) == 0:
                    patch_edit = patch
                else:
                    patch_edit = np.delete(patch, np.unique(np.array(flat_list)), axis=0)
                patch_edit = correcct_indx(patch_edit, sim_edit_node)

                # average the unit normals of the triangles around this node
                normals_local = []
                for i in range(len(patch_edit)):

                    vertices = nodes_edit[patch_edit[i]]

                    A = vertices[0]
                    B = vertices[1]
                    C = vertices[2]
                    AB = B - A
                    AC = C - A
                    normal1 = np.cross(AB, AC)
                    normal1_norm = normal1 / np.linalg.norm(normal1)

                    normals_local.append(normal1_norm.tolist())
                normal_average = np.average(np.array(normals_local), axis=0)
                normal_avg_lst.append(normal_average.tolist())
            normal_avg_arr = np.array(normal_avg_lst)

            nodes_inv["X_vec"] = normal_avg_arr[:, 0]
            nodes_inv["Y_vec"] = normal_avg_arr[:, 1]
            nodes_inv["Z_vec"] = normal_avg_arr[:, 2]
            nodes_inv.to_csv('prec_{}_{}_Nodes_and_Normals_3D.csv'.format(int(key), no_elements), index=False)
            G_normal_avg_vec.create_dataset("{}".format(int(key)), data=normal_avg_arr)

            ## Define the normal ends (the cylinder ends)
            normal_magni = []
            for i in range(len(nodes_edit)):
                x1, y1, z1 = nodes_edit[i]
                x2, y2, z2 = normal_avg_lst[i]
                normal_magni.append(cyl_endPoints(Normal_end_length, x1, x2, y1, y2, z1, z2))
            nodes_edit_lst.append(nodes_edit)
            simp_plot_edit_lst.append(simp_plot_edit)
            G_normal_ends.create_dataset("{}".format(int(key)), data=np.array(normal_magni))

            # with h5py.File('prec_{}_{}_Normal_ends_PCA_INV_3D.hdf5'.format(int(key), no_elements), 'w') as f:
            #     dset1 = f.create_dataset("normal_ends", data=np.array(normal_magni))
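

# Minimal usage sketch (hypothetical file names and parameters; adjust to your
# data). The input HDF5 is expected to hold DBSCAN cluster coordinates under
# group "1", and must contain at least four clusters because keys = [1, 2, 3]
# indexes cluster_combine_lst inside meshes_normals.
if __name__ == "__main__":
    meshes_normals(
        input_file="dbscan_clusters.hdf5",      # assumed input path
        output_file="meshes_and_normals.hdf5",  # assumed output path
        grid_size=4,            # grid spacing (nm)
        Normal_end_length=15,   # half-length of the plotted normals (nm)
    )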