-
Notifications
You must be signed in to change notification settings - Fork 2
/
model.py
74 lines (69 loc) · 2.87 KB
/
model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
import torch
import torch.nn as nn
import torch_geometric as tg
import torch.nn.functional as F
from torch_geometric.nn import MessagePassing
from torch_geometric.utils import add_self_loops, degree
from torch.nn import init
import pdb
####################### Basic Ops #############################
class GCN(torch.nn.Module):
    """Multi-layer GCN encoder that maps node features to L2-normalized embeddings.

    Architecture: optional linear feature projection, a first GCNConv,
    (layer_num - 2) hidden GCNConv layers, and an output GCNConv, with
    ReLU + optional dropout between convolutions.
    """

    def __init__(self, input_dim, feature_dim, hidden_dim, output_dim,
                 feature_pre=True, layer_num=2, dropout=True, **kwargs):
        super(GCN, self).__init__()
        self.feature_pre = feature_pre
        self.layer_num = layer_num
        self.dropout = dropout
        # Optionally project raw inputs to feature_dim before the first conv.
        if feature_pre:
            self.linear_pre = nn.Linear(input_dim, feature_dim)
            first_in_dim = feature_dim
        else:
            first_in_dim = input_dim
        self.conv_first = tg.nn.GCNConv(first_in_dim, hidden_dim)
        # One hidden conv per intermediate layer (empty when layer_num == 2).
        self.conv_hidden = nn.ModuleList(
            [tg.nn.GCNConv(hidden_dim, hidden_dim) for _ in range(layer_num - 2)]
        )
        self.conv_out = tg.nn.GCNConv(hidden_dim, output_dim)

    def forward(self, data):
        """Return unit-norm node embeddings for data.x over data.edge_index."""
        x, edge_index = data.x, data.edge_index
        if self.feature_pre:
            x = self.linear_pre(x)
        x = F.relu(self.conv_first(x, edge_index))
        if self.dropout:
            x = F.dropout(x, training=self.training)
        for conv in self.conv_hidden:
            x = F.relu(conv(x, edge_index))
            if self.dropout:
                x = F.dropout(x, training=self.training)
        x = self.conv_out(x, edge_index)
        # L2-normalize each node embedding along the feature dimension.
        return F.normalize(x, p=2, dim=-1)
class SAGE(torch.nn.Module):
    """Multi-layer GraphSAGE encoder producing L2-normalized node embeddings.

    Mirrors the GCN encoder's layout but uses SAGEConv: optional linear
    feature projection, a first SAGEConv, (layer_num - 2) hidden SAGEConv
    layers, and an output SAGEConv, with ReLU + optional dropout in between.
    """

    def __init__(self, input_dim, feature_dim, hidden_dim, output_dim,
                 feature_pre=True, layer_num=2, dropout=True, **kwargs):
        super(SAGE, self).__init__()
        self.feature_pre = feature_pre
        self.layer_num = layer_num
        self.dropout = dropout
        # Optionally project raw inputs to feature_dim before the first conv.
        if feature_pre:
            self.linear_pre = nn.Linear(input_dim, feature_dim)
            first_in_dim = feature_dim
        else:
            first_in_dim = input_dim
        self.conv_first = tg.nn.SAGEConv(first_in_dim, hidden_dim)
        # One hidden conv per intermediate layer (empty when layer_num == 2).
        self.conv_hidden = nn.ModuleList(
            [tg.nn.SAGEConv(hidden_dim, hidden_dim) for _ in range(layer_num - 2)]
        )
        self.conv_out = tg.nn.SAGEConv(hidden_dim, output_dim)

    def forward(self, data):
        """Return unit-norm node embeddings for data.x over data.edge_index."""
        x, edge_index = data.x, data.edge_index
        if self.feature_pre:
            x = self.linear_pre(x)
        x = F.relu(self.conv_first(x, edge_index))
        if self.dropout:
            x = F.dropout(x, training=self.training)
        for conv in self.conv_hidden:
            x = F.relu(conv(x, edge_index))
            if self.dropout:
                x = F.dropout(x, training=self.training)
        x = self.conv_out(x, edge_index)
        # L2-normalize each node embedding along the feature dimension.
        return F.normalize(x, p=2, dim=-1)