diff --git a/examples/func_fit/xor_hyperneat_feedforward.py b/examples/func_fit/xor_hyperneat_feedforward.py
new file mode 100644
index 0000000..79ad39c
--- /dev/null
+++ b/examples/func_fit/xor_hyperneat_feedforward.py
@@ -0,0 +1,63 @@
+import jax.numpy as jnp
+
+from tensorneat.pipeline import Pipeline
+from tensorneat.algorithm.neat import NEAT
+from tensorneat.algorithm.hyperneat import HyperNEATFeedForward, MLPSubstrate
+from tensorneat.genome import DefaultGenome
+from tensorneat.common import ACT
+
+from tensorneat.problem.func_fit import XOR3d
+
+if __name__ == "__main__":
+ pipeline = Pipeline(
+ algorithm=HyperNEATFeedForward(
+ substrate=MLPSubstrate(
+ layers=[4, 5, 5, 5, 1], coor_range=(-5.0, 5.0, -5.0, 5.0)
+ ),
+ neat=NEAT(
+ pop_size=10000,
+ species_size=20,
+ survival_threshold=0.01,
+ genome=DefaultGenome(
+ num_inputs=4, # size of query coors
+ num_outputs=1,
+ init_hidden_layers=(),
+ output_transform=ACT.tanh,
+ ),
+ ),
+ activation=ACT.tanh,
+ output_transform=ACT.sigmoid,
+ ),
+ problem=XOR3d(),
+ generation_limit=1000,
+ fitness_target=-1e-5,
+ )
+
+ # initialize state
+ state = pipeline.setup()
+ # print(state)
+ # run until terminate
+ state, best = pipeline.auto_run(state)
+ # show result
+ pipeline.show(state, best)
+
+ # visualize cppn
+ cppn_genome = pipeline.algorithm.neat.genome
+ cppn_network = cppn_genome.network_dict(state, *best)
+ cppn_genome.visualize(cppn_network, save_path="./imgs/cppn_network.svg")
+
+ # visualize hyperneat genome
+ hyperneat_genome = pipeline.algorithm.hyper_genome
+ # use cppn to calculate the weights in hyperneat genome
+ # return seqs, nodes, conns, u_conns
+ _, hyperneat_nodes, hyperneat_conns, _ = pipeline.algorithm.transform(state, best)
+    # mask connections with weight 0 as NaN (to visualize the actual network rather than the full substrate)
+ hyperneat_conns = jnp.where(
+ hyperneat_conns[:, 2][:, None] == 0, jnp.nan, hyperneat_conns
+ )
+ hyperneat_network = hyperneat_genome.network_dict(
+ state, hyperneat_nodes, hyperneat_conns
+ )
+ hyperneat_genome.visualize(
+ hyperneat_network, save_path="./imgs/hyperneat_network.svg"
+ )
diff --git a/imgs/cppn_network.svg b/imgs/cppn_network.svg
new file mode 100644
index 0000000..c293c71
--- /dev/null
+++ b/imgs/cppn_network.svg
@@ -0,0 +1,406 @@
+
+
+
diff --git a/imgs/hyperneat_network.svg b/imgs/hyperneat_network.svg
new file mode 100644
index 0000000..09efebe
--- /dev/null
+++ b/imgs/hyperneat_network.svg
@@ -0,0 +1,936 @@
+
+
+
diff --git a/src/tensorneat/algorithm/hyperneat/__init__.py b/src/tensorneat/algorithm/hyperneat/__init__.py
index eef06a2..5e28561 100644
--- a/src/tensorneat/algorithm/hyperneat/__init__.py
+++ b/src/tensorneat/algorithm/hyperneat/__init__.py
@@ -1,2 +1,3 @@
 from .hyperneat import HyperNEAT
-from .substrate import BaseSubstrate, DefaultSubstrate, FullSubstrate
+from .hyperneat_feedforward import HyperNEATFeedForward
+from .substrate import BaseSubstrate, DefaultSubstrate, FullSubstrate, MLPSubstrate
diff --git a/src/tensorneat/algorithm/hyperneat/hyperneat.py b/src/tensorneat/algorithm/hyperneat/hyperneat.py
index 137ca05..5c4bea5 100644
--- a/src/tensorneat/algorithm/hyperneat/hyperneat.py
+++ b/src/tensorneat/algorithm/hyperneat/hyperneat.py
@@ -78,6 +78,8 @@ class HyperNEAT(BaseAlgorithm):
query_res
), self.substrate.make_conns(query_res)
+ h_nodes, h_conns = jax.device_put([h_nodes, h_conns])
+
return self.hyper_genome.transform(state, h_nodes, h_conns)
def forward(self, state, transformed, inputs):
diff --git a/src/tensorneat/algorithm/hyperneat/hyperneat_feedforward.py b/src/tensorneat/algorithm/hyperneat/hyperneat_feedforward.py
new file mode 100644
index 0000000..0245996
--- /dev/null
+++ b/src/tensorneat/algorithm/hyperneat/hyperneat_feedforward.py
@@ -0,0 +1,44 @@
+"""
+HyperNEAT with Feedforward Substrate and genome
+"""
+
+from typing import Callable
+
+from .substrate import *
+from .hyperneat import HyperNEAT, HyperNEATNode, HyperNEATConn
+from tensorneat.common import ACT, AGG
+from tensorneat.algorithm import NEAT
+from tensorneat.genome import DefaultGenome
+
+
+class HyperNEATFeedForward(HyperNEAT):
+ def __init__(
+ self,
+ substrate: BaseSubstrate,
+ neat: NEAT,
+ weight_threshold: float = 0.3,
+ max_weight: float = 5.0,
+ aggregation: Callable = AGG.sum,
+ activation: Callable = ACT.sigmoid,
+ output_transform: Callable = ACT.sigmoid,
+ ):
+ assert (
+ substrate.query_coors.shape[1] == neat.num_inputs
+ ), "Query coors of Substrate should be equal to NEAT input size"
+
+ assert substrate.connection_type == "feedforward", "Substrate should be feedforward"
+
+ self.substrate = substrate
+ self.neat = neat
+ self.weight_threshold = weight_threshold
+ self.max_weight = max_weight
+ self.hyper_genome = DefaultGenome(
+ num_inputs=substrate.num_inputs,
+ num_outputs=substrate.num_outputs,
+ max_nodes=substrate.nodes_cnt,
+ max_conns=substrate.conns_cnt,
+ node_gene=HyperNEATNode(aggregation, activation),
+ conn_gene=HyperNEATConn(),
+ output_transform=output_transform,
+ )
+ self.pop_size = neat.pop_size
diff --git a/src/tensorneat/algorithm/hyperneat/substrate/__init__.py b/src/tensorneat/algorithm/hyperneat/substrate/__init__.py
index f532c43..9725e77 100644
--- a/src/tensorneat/algorithm/hyperneat/substrate/__init__.py
+++ b/src/tensorneat/algorithm/hyperneat/substrate/__init__.py
@@ -1,3 +1,4 @@
from .base import BaseSubstrate
from .default import DefaultSubstrate
from .full import FullSubstrate
+from .mlp import MLPSubstrate
diff --git a/src/tensorneat/algorithm/hyperneat/substrate/base.py b/src/tensorneat/algorithm/hyperneat/substrate/base.py
index 768ab20..0ae9427 100644
--- a/src/tensorneat/algorithm/hyperneat/substrate/base.py
+++ b/src/tensorneat/algorithm/hyperneat/substrate/base.py
@@ -3,6 +3,8 @@ from tensorneat.common import StatefulBaseClass
class BaseSubstrate(StatefulBaseClass):
+ connection_type = None
+
def make_nodes(self, query_res):
raise NotImplementedError
diff --git a/src/tensorneat/algorithm/hyperneat/substrate/default.py b/src/tensorneat/algorithm/hyperneat/substrate/default.py
index af99191..f3e02b3 100644
--- a/src/tensorneat/algorithm/hyperneat/substrate/default.py
+++ b/src/tensorneat/algorithm/hyperneat/substrate/default.py
@@ -6,6 +6,9 @@ from tensorneat.genome.utils import set_conn_attrs
class DefaultSubstrate(BaseSubstrate):
+
+ connection_type = "recurrent"
+
def __init__(self, num_inputs, num_outputs, coors, nodes, conns):
self.inputs = num_inputs
self.outputs = num_outputs
diff --git a/src/tensorneat/algorithm/hyperneat/substrate/full.py b/src/tensorneat/algorithm/hyperneat/substrate/full.py
index ec0bd23..6e07b2b 100644
--- a/src/tensorneat/algorithm/hyperneat/substrate/full.py
+++ b/src/tensorneat/algorithm/hyperneat/substrate/full.py
@@ -3,6 +3,9 @@ from .default import DefaultSubstrate
class FullSubstrate(DefaultSubstrate):
+
+ connection_type = "recurrent"
+
def __init__(
self,
input_coors=((-1, -1), (0, -1), (1, -1)),
@@ -60,6 +63,7 @@ def analysis_substrate(input_coors, output_coors, hidden_coors):
) # input_idx, output_idx, weight
conns[:, :2] = correspond_keys
+    # NOTE(review): debug print of (query_coors, nodes, conns) removed before merge
return query_coors, nodes, conns
diff --git a/src/tensorneat/algorithm/hyperneat/substrate/mlp.py b/src/tensorneat/algorithm/hyperneat/substrate/mlp.py
new file mode 100644
index 0000000..ccdaf30
--- /dev/null
+++ b/src/tensorneat/algorithm/hyperneat/substrate/mlp.py
@@ -0,0 +1,94 @@
+from typing import List, Tuple
+import numpy as np
+
+from .default import DefaultSubstrate
+
+
+class MLPSubstrate(DefaultSubstrate):
+
+ connection_type = "feedforward"
+
+ def __init__(self, layers: List[int], coor_range: Tuple[float] = (-1, 1, -1, 1)):
+ """
+ layers: list of integers, the number of neurons in each layer
+ coor_range: tuple of 4 floats, the range of the substrate. (x_min, x_max, y_min, y_max)
+ """
+ assert len(layers) >= 2, "The number of layers should be at least 2"
+ for layer in layers:
+ assert layer > 0, "The number of neurons in each layer should be positive"
+ assert coor_range[0] < coor_range[1], "x_min should be less than x_max"
+ assert coor_range[2] < coor_range[3], "y_min should be less than y_max"
+
+ num_inputs = layers[0]
+ num_outputs = layers[-1]
+ query_coors, nodes, conns = analysis_substrate(layers, coor_range)
+ super().__init__(num_inputs, num_outputs, query_coors, nodes, conns)
+
+
+def analysis_substrate(layers, coor_range):
+ x_min, x_max, y_min, y_max = coor_range
+ layer_cnt = len(layers)
+ y_interval = (y_max - y_min) / (layer_cnt - 1)
+
+ # prepare nodes indices and coordinates
+ node_coors = {}
+ input_indices = list(range(layers[0]))
+ input_coors = cal_coors(layers[0], x_min, x_max, y_min)
+
+ output_indices = list(range(layers[0], layers[0] + layers[-1]))
+ output_coors = cal_coors(layers[-1], x_min, x_max, y_max)
+
+ if layer_cnt == 2: # only input and output layers
+ node_layers = [input_indices, output_indices]
+ node_coors = [*input_coors, *output_coors]
+ else:
+ hidden_indices, hidden_coors = [], []
+ hidden_idx = layers[0] + layers[-1]
+ hidden_layers = []
+ for layer_idx in range(1, layer_cnt - 1):
+ y_coor = y_min + layer_idx * y_interval
+ indices = list(range(hidden_idx, hidden_idx + layers[layer_idx]))
+ coors = cal_coors(layers[layer_idx], x_min, x_max, y_coor)
+
+ hidden_layers.append(indices)
+ hidden_indices.extend(indices)
+ hidden_coors.extend(coors)
+ hidden_idx += layers[layer_idx]
+
+ node_layers = [
+ input_indices,
+ *hidden_layers,
+ output_indices,
+ ] # the layers of hyperneat network
+ node_coors = [*input_coors, *output_coors, *hidden_coors]
+
+ # prepare connections
+ query_coors, correspond_keys = [], []
+ for layer_idx in range(layer_cnt - 1):
+ for i in range(layers[layer_idx]):
+ for j in range(layers[layer_idx + 1]):
+ neuron1 = node_layers[layer_idx][i]
+ neuron2 = node_layers[layer_idx + 1][j]
+ query_coors.append((*node_coors[neuron1], *node_coors[neuron2]))
+ correspond_keys.append((neuron1, neuron2))
+
+ # nodes order in TensorNEAT must be input->output->hidden
+ ordered_nodes = [*node_layers[0], *node_layers[-1]]
+ for layer in node_layers[1:-1]:
+ ordered_nodes.extend(layer)
+ nodes = np.array(ordered_nodes)[:, np.newaxis]
+ conns = np.zeros(
+ (len(correspond_keys), 3), dtype=np.float32
+ ) # input_idx, output_idx, weight
+ conns[:, :2] = correspond_keys
+
+ query_coors = np.array(query_coors)
+
+ return query_coors, nodes, conns
+
+
+def cal_coors(neuron_cnt, x_min, x_max, y_coor):
+ if neuron_cnt == 1: # only one neuron in this layer
+ return [((x_min + x_max) / 2, y_coor)]
+ x_interval = (x_max - x_min) / (neuron_cnt - 1)
+ return [(x_min + x_interval * i, y_coor) for i in range(neuron_cnt)]
diff --git a/src/tensorneat/genome/default.py b/src/tensorneat/genome/default.py
index 2464fc9..eb9d1f3 100644
--- a/src/tensorneat/genome/default.py
+++ b/src/tensorneat/genome/default.py
@@ -331,3 +331,4 @@ class DefaultGenome(BaseGenome):
**kwargs,
)
plt.savefig(save_path, dpi=save_dpi)
+ plt.close()
diff --git a/test/mlp_substrate.py b/test/mlp_substrate.py
new file mode 100644
index 0000000..030f79f
--- /dev/null
+++ b/test/mlp_substrate.py
@@ -0,0 +1,6 @@
+from tensorneat.algorithm.hyperneat.substrate.mlp import MLPSubstrate, analysis_substrate
+
+layers = [3, 4, 2]
+coor_range = (-1, 1, -1, 1)
+query_coors, nodes, conns = analysis_substrate(layers, coor_range)
+print(query_coors, nodes, conns)