'copy' imported but unused:
    1 import copy

'torch_geometric.nn.inits.zeros' imported but unused:
    13 from torch_geometric.nn.inits import zeros

'pdb' imported but unused:
    18 import pdb

Line too long (89 > 79 characters):
    15 from torch_geometric.utils import remove_self_loops, add_self_loops, segregate_self_loops
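Suggested fix for the four import findings (a sketch, assuming 'copy', 'zeros', and 'pdb' are not referenced anywhere else in the file): delete the three unused imports, and wrap the long torch_geometric.utils import in parentheses so it fits within 79 columns.

    # Line 15, reformatted with an implicit continuation inside parentheses;
    # lines 1, 13 and 18 are simply removed.
    from torch_geometric.utils import (
        remove_self_loops,
        add_self_loops,
        segregate_self_loops,
    )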
Line too long (112 > 79 characters):
    29 self.mlp = torch.nn.Sequential(torch.nn.Linear(emb_dim, 2 * emb_dim), torch.nn.BatchNorm1d(2 * emb_dim),

Line too long (94 > 79 characters):
    30 torch.nn.ReLU(), torch.nn.Linear(2 * emb_dim, emb_dim))

Line too long (101 > 79 characters):
    41 out = self.mlp((1 + self.eps) *x + self.propagate(edge_index, x=x, edge_attr=edge_embedding))
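Suggested fix for lines 29-30 and 41 (a sketch; the temporary variable name 'aggr' is illustrative, everything else comes from the flagged lines). Note that line 41 also wants a space in (1 + self.eps) * x:

    # Lines 29-30: one module per line inside torch.nn.Sequential.
    self.mlp = torch.nn.Sequential(
        torch.nn.Linear(emb_dim, 2 * emb_dim),
        torch.nn.BatchNorm1d(2 * emb_dim),
        torch.nn.ReLU(),
        torch.nn.Linear(2 * emb_dim, emb_dim),
    )

    # Line 41: split the GIN update into two statements with identical behaviour.
    aggr = self.propagate(edge_index, x=x, edge_attr=edge_embedding)
    out = self.mlp((1 + self.eps) * x + aggr)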
Line too long (84 > 79 characters):
    70 # edge_weight = torch.ones((edge_index.size(1), ), device=edge_index.device)

Line too long (93 > 79 characters):
    77 return self.propagate(edge_index, x=x, edge_attr=edge_embedding, norm=norm) + F.relu(
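Suggested fix for lines 70 and 77 (a sketch; the argument of F.relu( continues on the next source line, which is not part of the flagged output, so it is left as ... below):

    # Line 70: the commented-out line can simply be deleted; if it is kept
    # for reference, wrap the comment so it stays under 79 columns:
    # edge_weight = torch.ones((edge_index.size(1),),
    #                          device=edge_index.device)

    # Line 77: bind the propagate(...) result to a local name so the return
    # statement no longer overflows.
    out = self.propagate(edge_index, x=x,
                         edge_attr=edge_embedding, norm=norm)
    return out + F.relu(
        ...)  # original F.relu argument, unchanged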
Line too long (83 > 79 characters):
    157 # edge_index, norm = coalesce(edge_index, norm, m=x.shape[0], n=x.shape[0])

Line too long (93 > 79 characters):
    158 edge_index, norm, loop_edge_index, loop_norm = segregate_self_loops(edge_index, norm)

Line too long (101 > 79 characters):
    159 loop_edge_index, loop_norm = coalesce(loop_edge_index, loop_norm, m=x.shape[0], n=x.shape[0])
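Suggested fix for lines 157-159 (a sketch; only the line breaks change). The commented-out line 157 can simply be deleted if it is no longer needed:

    # Line 158: break after the opening parenthesis of segregate_self_loops.
    edge_index, norm, loop_edge_index, loop_norm = segregate_self_loops(
        edge_index, norm)

    # Line 159: same treatment for coalesce.
    loop_edge_index, loop_norm = coalesce(
        loop_edge_index, loop_norm, m=x.shape[0], n=x.shape[0])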
Line too long (100 > 79 characters):
    167 Tx_1 = self.propagate(edge_index, x=x, edge_attr=edge_embedding, norm=norm, size=None)+\

Line too long (103 > 79 characters):
    172 Tx_2 = self.propagate(edge_index, x=Tx_1, edge_attr=edge_embedding, norm=norm, size=None)+\
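Suggested fix for lines 167 and 172 (a sketch): use implicit continuation inside parentheses rather than a trailing backslash, and put spaces around the +. The second operand of each addition continues on the following source line and is not part of the flagged output, so it is left as ... here:

    Tx_1 = (self.propagate(edge_index, x=x, edge_attr=edge_embedding,
                           norm=norm, size=None)
            + ...)  # second operand unchanged from the original source
    Tx_2 = (self.propagate(edge_index, x=Tx_1, edge_attr=edge_embedding,
                           norm=norm, size=None)
            + ...)  # second operand unchanged from the original source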
Unexpected spaces around keyword / parameter equals (in 2 places):
    27 super(GINConvNew, self).__init__(aggr = "add")

Unexpected spaces around keyword / parameter equals (in 2 places):
    34 self.edge_encoder = BondEncoder(emb_dim = emb_dim)

Unexpected spaces around keyword / parameter equals (in 2 places):
    60 self.edge_encoder = BondEncoder(emb_dim = emb_dim)

Unexpected spaces around keyword / parameter equals (in 2 places):
    99 self.edge_encoder = BondEncoder(emb_dim = emb_dim)
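Suggested fix for lines 27, 34, 60 and 99: pycodestyle (E251) disallows spaces around = when it introduces a keyword argument, while the spaces around the assignment to self.edge_encoder stay. A sketch of the corrected calls:

    # Line 27:
    super(GINConvNew, self).__init__(aggr="add")

    # Lines 34, 60 and 99 (identical fix at each occurrence):
    self.edge_encoder = BondEncoder(emb_dim=emb_dim)

On Python 3, super().__init__(aggr="add") is an equivalent, shorter spelling, though that goes beyond what the linter flags.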