import pyvista as pv
from pyvista import examples
import pyacvd
import trimesh
from tqdm import tqdm
import torch
import numpy as np
from torch_geometric.datasets import MNISTSuperpixels
from torch_geometric.data import Data
import torch.nn as nn
import torch.nn.functional as F
from torch_geometric.data import DataLoader
from torch_geometric.nn import SplineConv
from torch_scatter import scatter
import os
import os.path as osp
import torch_geometric.transforms as T
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from scipy.spatial import Delaunay
path = osp.join('.', '..', 'data', 'MNIST')
transform = T.Compose([T.Delaunay(), T.FaceToEdge(remove_faces=False)])
mnist_dataset = MNISTSuperpixels(path, True, transform=transform)[:10000]  # True selects the training split
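# Hedged sanity check (added for illustration, not in the original paste): after
# T.Delaunay() + T.FaceToEdge(remove_faces=False) each sample should carry node
# features `x`, 2D superpixel positions `pos`, triangle `face` indices and the
# derived `edge_index`, e.g. Data(x=[75, 1], edge_index=[2, E], y=[1], pos=[75, 2], face=[3, F]).
print(mnist_dataset[0])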
class MeshData(Data):
    """Graph with a 3-level coarsening hierarchy (edge indices, edge attributes
    and cluster assignments per level) for hierarchical pooling."""
    def __init__(self, x=None, y=None, num_nodes=None,
                 edge_index0=None, edge_attr0=None, cluster0=None,
                 edge_index1=None, edge_attr1=None, cluster1=None,
                 edge_index2=None, edge_attr2=None, cluster2=None):
        super().__init__()
        self.x = x
        self.y = y
        self.num_nodes = num_nodes
        self.edge_index0, self.edge_attr0, self.cluster0 = edge_index0, edge_attr0, cluster0
        self.edge_index1, self.edge_attr1, self.cluster1 = edge_index1, edge_attr1, cluster1
        self.edge_index2, self.edge_attr2, self.cluster2 = edge_index2, edge_attr2, cluster2

    def __inc__(self, key, value, *args, **kwargs):
        # Offsets applied to index-valued attributes when graphs are batched:
        # edge_index{d} indexes the nodes of level d (one cluster id per node,
        # so cluster{d}.size(0) is that node count), while cluster{d} indexes the
        # nodes of level d+1. Non-index attributes (x, y, edge_attr*) fall back
        # to the default increment of 0.
        if key == 'edge_index0':
            return self.cluster0.size(0)
        if key == 'edge_index1':
            return self.cluster1.size(0)
        if key == 'edge_index2':
            return self.cluster2.size(0)
        if key == 'cluster0':
            return self.cluster0.max() + 1
        if key == 'cluster1':
            return self.cluster1.max() + 1
        if key == 'cluster2':
            return self.cluster2.max() + 1
        return super().__inc__(key, value, *args, **kwargs)
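# Hedged illustration (not part of the original paste): how the custom __inc__
# above offsets index-valued attributes when PyG batches MeshData objects.
# Batch.from_data_list is standard torch_geometric; the two tiny graphs are made up.
from torch_geometric.data import Batch
_a = MeshData(x=torch.zeros(4, 1), y=torch.tensor([0]),
              edge_index0=torch.tensor([[0, 1], [1, 0]]), cluster0=torch.tensor([0, 0, 1, 1]))
_b = MeshData(x=torch.zeros(3, 1), y=torch.tensor([1]),
              edge_index0=torch.tensor([[0, 2], [2, 0]]), cluster0=torch.tensor([0, 1, 1]))
_batch = Batch.from_data_list([_a, _b])
# _b's edge_index0 is shifted by _a.cluster0.size(0) == 4 and _b's cluster0 by
# _a.cluster0.max() + 1 == 2, so indices stay valid inside the merged graph.
print(_batch.edge_index0, _batch.cluster0)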
# Only use for initial data generation: convert each superpixel graph to a PyVista surface mesh.
entire_data = pv.MultiBlock()
for i in tqdm(range(len(mnist_dataset))):
    # Lift the 2D superpixel positions (75 nodes per MNIST graph) to 3D with z = 0.
    points0 = np.vstack((np.array(mnist_dataset[i].pos).T, np.zeros(75))).T
    # PolyData expects a flat face array [3, i, j, k, 3, ...] and vertices of shape (N, 3).
    face0 = np.vstack((np.ones(np.array(mnist_dataset[i].face).shape[1]) * 3,
                       np.array(mnist_dataset[i].face))).T.reshape(-1)
    face0 = np.intc(face0)
    mesh0 = pv.PolyData(points0, face0)
    mesh0['x'] = np.array(mnist_dataset[i].x)  # store the node features on the mesh
    entire_data.append(mesh0)
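# Hedged mini-example (illustration only): PyVista's PolyData takes (N, 3) vertices
# plus a flat face array where each face is prefixed by its vertex count, i.e.
# [3, i, j, k, 3, ...] -- exactly what the reshape(-1) above builds.
_demo_pts = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0]])
_demo_faces = np.array([3, 0, 1, 2, 3, 1, 3, 2])  # two triangles sharing an edge
_demo_mesh = pv.PolyData(_demo_pts, _demo_faces)
print(_demo_mesh.n_points, _demo_mesh.n_cells)  # 4 points, 2 faces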
# Only use for initial data generation: build a 3-level coarsening hierarchy per mesh.
depth = 3  # number of coarsened graphs used for pooling
mesh_all = []
for i in tqdm(range(len(entire_data))):
    # Per-graph containers: one entry per coarsening level.
    edge_index_all = []
    edge_attr_all = []
    cluster_all = []
    for d in range(depth):
        edge_index = [[], []]
        edge_attr = []
        if d == 0:
            mesh0 = entire_data[i]
        else:
            mesh0 = mesh1
        n0 = mesh0.n_points
        reduction_rate = 0.2  # each level keeps ~20% of the previous level's vertices
        n1 = int(reduction_rate * n0)
        cluster0 = pyacvd.Clustering(mesh0)
        cluster0.cluster(n1)
        pos = mesh0.points
        # Both edge directions of the current level, with relative positions as attributes.
        for [p, q] in cluster0._edges.tolist():
            edge_index[0].extend([p, q])
            edge_index[1].extend([q, p])
            edge_attr.extend([pos[q] - pos[p], pos[p] - pos[q]])
        edge_attr = np.array(edge_attr)
        edge_index_all.append(edge_index)
        edge_attr_all.append(edge_attr)
        cluster_all.append(cluster0.clusters)
        if d != (depth - 1):
            mesh1 = cluster0.create_mesh()
    # Assemble one MeshData object per digit holding all three hierarchy levels.
    g = MeshData(x=torch.tensor(entire_data[i]['x'], dtype=torch.float32),
                 y=torch.tensor(mnist_dataset[i].y),
                 edge_index0=torch.tensor(edge_index_all[0], dtype=torch.long),
                 edge_index1=torch.tensor(edge_index_all[1], dtype=torch.long),
                 edge_index2=torch.tensor(edge_index_all[2], dtype=torch.long),
                 edge_attr0=torch.tensor(edge_attr_all[0], dtype=torch.float32),
                 edge_attr1=torch.tensor(edge_attr_all[1], dtype=torch.float32),
                 edge_attr2=torch.tensor(edge_attr_all[2], dtype=torch.float32),
                 cluster0=torch.tensor(cluster_all[0], dtype=torch.long),
                 cluster1=torch.tensor(cluster_all[1], dtype=torch.long),
                 cluster2=torch.tensor(cluster_all[2], dtype=torch.long),
                 )
    mesh_all.append(g)
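# Hedged sketch (illustration only, not from the original script): the cluster
# vectors stored above are presumably meant for hierarchical pooling, i.e.
# collapsing fine-level node features onto their assigned coarse vertex with
# torch_scatter.scatter (imported above). Tiny synthetic example:
_demo_x = torch.arange(6, dtype=torch.float32).view(6, 1)  # 6 fine nodes, 1 feature each
_demo_cluster = torch.tensor([0, 0, 1, 1, 2, 2])           # assignment to 3 coarse nodes
print(scatter(_demo_x, _demo_cluster, dim=0, reduce='max'))  # -> tensor([[1.], [3.], [5.]])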
# Normalize the edge attributes and save all mesh data.
# Collect per-component maxima of the relative edge vectors at each level.
xmm0, ymm0, zmm0 = [], [], []
xmm1, ymm1, zmm1 = [], [], []
xmm2, ymm2, zmm2 = [], [], []
for p in tqdm(mesh_all):
    xmm0.append(p.edge_attr0[:, 0].max())
    ymm0.append(p.edge_attr0[:, 1].max())
    zmm0.append(p.edge_attr0[:, 2].max())
    xmm1.append(p.edge_attr1[:, 0].max())
    ymm1.append(p.edge_attr1[:, 1].max())
    zmm1.append(p.edge_attr1[:, 2].max())
    xmm2.append(p.edge_attr2[:, 0].max())
    ymm2.append(p.edge_attr2[:, 1].max())
    zmm2.append(p.edge_attr2[:, 2].max())
# For 3D data, keep the z component as well:
# xyzm0 = np.array([max(xmm0), max(ymm0), max(zmm0)])
# xyzm1 = np.array([max(xmm1), max(ymm1), max(zmm1)])
# xyzm2 = np.array([max(xmm2), max(ymm2), max(zmm2)])
# For 2D data, (x, y) only:
xyzm0 = np.array([max(xmm0), max(ymm0)])
xyzm1 = np.array([max(xmm1), max(ymm1)])
xyzm2 = np.array([max(xmm2), max(ymm2)])
print(xyzm0, xyzm1, xyzm2)
# Map each 2D edge attribute from [-max, max] into roughly [0.05, 0.95]
# (dropping the z component, which is zero for these flat meshes).
for p in tqdm(mesh_all):
    p.edge_attr0 = 0.05 + (p.edge_attr0[:, :2] + xyzm0) / (2.2222222 * xyzm0)
    p.edge_attr1 = 0.05 + (p.edge_attr1[:, :2] + xyzm1) / (2.2222222 * xyzm1)
    p.edge_attr2 = 0.05 + (p.edge_attr2[:, :2] + xyzm2) / (2.2222222 * xyzm2)
torch.save(mesh_all, os.path.join('./', 'mesh_graph.pt'))
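# Hedged usage sketch (not part of the original paste): downstream, the list can be
# wrapped in the PyG DataLoader imported above; batching then applies the custom
# __inc__ offsets defined in MeshData. (Reloading the saved .pt file on recent
# PyTorch versions may require torch.load(..., weights_only=False).)
_demo_loader = DataLoader(mesh_all, batch_size=32, shuffle=True)
print(next(iter(_demo_loader)))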