# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_delaunay():
    """Smoke-test the Delaunay dataset generator.

    Generates the dataset in 'numpy' mode, checks that the padded outputs are
    well-formed, and that there is one label row per graph; then generates in
    'networkx' mode purely to confirm it runs without raising.
    """
    adjacency, node_feats, targets = delaunay.generate_data('numpy', classes=[0, 1, 2])
    correctly_padded(adjacency, node_feats, None)
    # One label per adjacency matrix (per graph)
    assert adjacency.shape[0] == targets.shape[0]
    # 'networkx' return type: just ensure it doesn't crash
    delaunay.generate_data('networkx')
def test_delaunay():
    """Smoke-test delaunay.generate_data and the padding of its 'numpy' output.

    NOTE(review): this function is a byte-for-byte duplicate of the
    test_delaunay defined immediately above. In Python the second definition
    shadows the first, so pytest collects only one of them — one copy should
    be removed (or given a distinct name if a second case was intended).
    """
    adj, nf, labels = delaunay.generate_data('numpy', classes=[0, 1, 2])
    correctly_padded(adj, nf, None)
    # One label row per graph in the batch
    assert adj.shape[0] == labels.shape[0]
    # Test that it doesn't crash
    delaunay.generate_data('networkx')
of Delaunay triangulations, using a graph attention network (Velickovic et al.)
in batch mode.
"""
from keras.callbacks import EarlyStopping
from keras.layers import Input, Dense
from keras.models import Model
from keras.optimizers import Adam
from keras.regularizers import l2
from sklearn.model_selection import train_test_split
from spektral.datasets import delaunay
from spektral.layers import GraphAttention, GlobalAttentionPool
# Load data: A = adjacency matrices, X = node features, y = one-hot labels
# for Delaunay-triangulation graphs from the two requested classes.
A, X, y = delaunay.generate_data(return_type='numpy', classes=[0, 5])
# Parameters
N = X.shape[-2]          # Number of nodes in the graphs
F = X.shape[-1]          # Original feature dimensionality
n_classes = y.shape[-1]  # Number of classes
l2_reg = 5e-4            # Regularization rate for l2
learning_rate = 1e-3     # Learning rate for Adam
epochs = 20000           # Number of training epochs
batch_size = 32          # Batch size
es_patience = 200        # Patience for early stopping
# Train/test split (10% held out; A, X, y are split with the same shuffle)
A_train, A_test, \
x_train, x_test, \
y_train, y_test = train_test_split(A, X, y, test_size=0.1)
],
'methods': [],
'classes': []
},
{
'page': 'datasets/tud.md',
'functions': [
datasets.tud.load_data
],
'methods': [],
'classes': []
},
{
'page': 'datasets/delaunay.md',
'functions': [
datasets.delaunay.generate_data
],
'methods': [],
'classes': []
},
{
'page': 'datasets/qm9.md',
'functions': [
datasets.qm9.load_data
],
'methods': [],
'classes': []
},
{
'page': 'datasets/mnist.md',
'functions': [
datasets.mnist.load_data