Line too long (122 > 79 characters):
48 self.central_node_edge_encoder = central_encoder(kwargs['edge_embedding'], d_ef, extend=kwargs['extend_dims'])

Line too long (80 > 79 characters):
90 x = torch.cat([x, degrees], -1) if self.retain_features else degrees

Line too long (98 > 79 characters):
93 edge_features = edge_features.unsqueeze(-1) if edge_features.dim() == 1 else edge_features

Line too long (100 > 79 characters):
99 edge_features_ii, edge_features = self.central_node_edge_encoder(edge_features, n_nodes)

Line too long (105 > 79 characters):
103 self.propagate(edge_index=edge_index, x=x, edge_features=edge_features))

Line too long (130 > 79 characters):
106 out = self.update_fn(torch.cat((x, self.propagate(edge_index=edge_index, x=x, edge_features=edge_features)), -1))

Line too long (85 > 79 characters):
116 edge_index_i, edge_index_j = edge_index[select, :], edge_index[1 - select, :]

Line too long (104 > 79 characters):
120 msgs = torch.sparse.FloatTensor(edge_index, msgs, torch.Size([n_nodes, n_nodes, msgs.shape[1]]))

Line too long (106 > 79 characters):
132 raise NotImplementedError("Aggregation kind {} is not currently supported.".format(self.aggr))

Line too long (106 > 79 characters):
144 raise NotImplementedError("Message kind {} is not currently supported.".format(self.msg_kind))

Line too long (109 > 79 characters):
149 return '{}(msg_fn = {}, update_fn = {})'.format(self.__class__.__name__, self.msg_fn, self.update_fn)
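All of the above are PEP 8 E501 violations, so each can be resolved by wrapping the statement across lines inside its existing parentheses rather than changing behaviour. The sketch below shows one possible reflow for the expressions reported at lines 90 and 106; it is a minimal, self-contained illustration using plain torch, with hypothetical stand-in tensors and an nn.Linear in place of the layer's own forward() arguments, propagate() result, and update_fn, not the project's actual code.

import torch

# Hypothetical stand-ins so the reflowed statements run on their own; in
# the layer they come from forward() and the module's attributes.
n_nodes, d = 5, 16
x = torch.randn(n_nodes, d)
degrees = torch.randn(n_nodes, 1)
retain_features = True
msgs = torch.randn(n_nodes, d)            # stand-in for self.propagate(...)
update_fn = torch.nn.Linear(2 * d + 1, d)

# Line 90: wrapping the conditional expression in parentheses keeps each
# physical line under 79 characters.
x = (
    torch.cat([x, degrees], -1)
    if retain_features
    else degrees
)

# Line 106: splitting the nested cat/update calls across lines inside
# their parentheses works the same way; the propagate() call that
# produces `msgs` can be broken up identically.
out = update_fn(
    torch.cat((x, msgs), -1)
)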