change a lot a lot a lot!!!!!!!
This commit is contained in:
@@ -1,2 +0,0 @@
|
||||
from .hyperneat import HyperNEAT
|
||||
from .substrate import BaseSubstrate
|
||||
@@ -1,70 +0,0 @@
|
||||
from typing import Type
|
||||
|
||||
import jax
|
||||
import numpy as np
|
||||
|
||||
from .substrate import BaseSubstrate, analysis_substrate
|
||||
from .hyperneat_gene import HyperNEATGene
|
||||
from algorithm import State, Algorithm, neat
|
||||
|
||||
|
||||
class HyperNEAT(Algorithm):
    """HyperNEAT: evolves a NEAT genome (the CPPN) whose queried outputs
    supply the connection weights of a fixed substrate network."""

    def __init__(self, config, gene_type: Type[neat.BaseGene], substrate: Type[BaseSubstrate]):
        super().__init__()

        self.config = config
        self.gene_type = gene_type
        self.substrate = substrate
        self.neat = neat.NEAT(config, gene_type)

        # Bound callables consumed by the outer training loop.
        self.tell = create_tell(self.neat)
        self.forward_transform = create_forward_transform(config, self.neat)
        self.forward = HyperNEATGene.create_forward(config)

    def setup(self, randkey, state=State()):
        """Initialize state: thresholds, substrate layout, the substrate's
        node/connection tables, and the inner NEAT population.

        NOTE(review): `state=State()` is a mutable-looking default; safe only
        if State.update returns a fresh state (it appears to) — confirm.
        """
        state = state.update(
            below_threshold=self.config['below_threshold'],
            max_weight=self.config['max_weight'],
        )
        state = self.substrate.setup(state, self.config)

        # Derive node indices, CPPN query coordinates and the (src, dst)
        # key pairs from the substrate geometry.
        h_input_idx, h_output_idx, h_hidden_idx, query_coors, correspond_keys = analysis_substrate(state)

        all_idx = np.concatenate((h_input_idx, h_output_idx, h_hidden_idx))
        h_nodes = all_idx[:, np.newaxis]  # single column: the node index

        # Connection table rows are [in_key, out_key, weight]; the weight
        # column stays zero until the CPPN is queried at forward time.
        n_conns = correspond_keys.shape[0]
        h_conns = np.zeros((n_conns, 3), dtype=np.float32)
        h_conns[:, 0:2] = correspond_keys

        state = state.update(
            # the "h_" prefix marks hyperneat (substrate-level) data
            h_input_idx=h_input_idx,
            h_output_idx=h_output_idx,
            h_hidden_idx=h_hidden_idx,
            query_coors=query_coors,
            correspond_keys=correspond_keys,
            h_nodes=h_nodes,
            h_conns=h_conns,
        )
        state = self.neat.setup(randkey, state=state)

        # HyperNEATGene.create_forward reads these indices from config.
        self.config['h_input_idx'] = h_input_idx
        self.config['h_output_idx'] = h_output_idx

        return state
|
||||
|
||||
|
||||
def create_tell(neat_instance):
    """Build the ``tell`` callable for :class:`HyperNEAT`.

    The returned closure simply forwards ``(state, fitness)`` to the wrapped
    NEAT instance — HyperNEAT's tell step is exactly NEAT's.
    """

    def _tell(state, fitness):
        return neat_instance.tell(state, fitness)

    return _tell
|
||||
|
||||
|
||||
def create_forward_transform(config, neat_instance):
    """Build the forward-transform callable for :class:`HyperNEAT`.

    The returned function compiles a NEAT genome (the CPPN), queries it once
    per substrate connection, and hands the completed substrate tables to
    ``HyperNEATGene.forward_transform``.
    """

    def forward_transform(state, nodes, conns):
        # Compile the CPPN genome into a queryable transform.
        cppn = neat_instance.forward_transform(state, nodes, conns)

        # Batch-query the CPPN over every connection's coordinate pair to
        # obtain the substrate connection weights.
        query = jax.vmap(neat_instance.forward, in_axes=(0, None))
        weights = query(state.query_coors, cppn)  # hyperneat connections

        # Write the queried weights into the weight column (index 2 onward)
        # of the pre-built connection table.
        filled_conns = state.h_conns.at[:, 2:].set(weights)
        return HyperNEATGene.forward_transform(state, state.h_nodes, filled_conns)

    return forward_transform
|
||||
@@ -1,54 +0,0 @@
|
||||
import jax
|
||||
from jax import numpy as jnp, vmap
|
||||
|
||||
from algorithm.neat import BaseGene
|
||||
from algorithm.neat.gene import Activation
|
||||
from algorithm.neat.gene import Aggregation
|
||||
|
||||
|
||||
class HyperNEATGene(BaseGene):
    """Gene type for the *substrate* network produced by HyperNEAT.

    Unlike an evolved NEAT gene, this gene carries no evolvable node
    attributes; its only connection attribute is the CPPN-queried weight.
    """

    node_attrs = []  # no node attributes
    conn_attrs = ['weight']

    @staticmethod
    def forward_transform(state, nodes, conns):
        """Convert the connection table into a dense (N, N) weight matrix.

        ``conns`` rows are [in_key, out_key, weight]; entry (in, out) of the
        returned matrix holds that weight, 0 elsewhere.
        """
        N = nodes.shape[0]
        u_conns = jnp.zeros((N, N), dtype=jnp.float32)

        # Split the connection table into integer endpoint keys and weights.
        in_keys = jnp.asarray(conns[:, 0], jnp.int32)
        out_keys = jnp.asarray(conns[:, 1], jnp.int32)
        weights = conns[:, 2]

        u_conns = u_conns.at[in_keys, out_keys].set(weights)
        return nodes, u_conns

    @staticmethod
    def create_forward(config):
        """Build the substrate-network forward function.

        Reads 'h_activation', 'h_aggregation' and 'h_activate_times' from
        config, plus 'h_input_idx'/'h_output_idx' (written by
        HyperNEAT.setup) at call time.
        """
        act = Activation.name2func[config['h_activation']]
        agg = Aggregation.name2func[config['h_aggregation']]

        # Vectorize the activation/aggregation over all N nodes at once.
        batch_act, batch_agg = vmap(act), vmap(agg)

        def forward(inputs, transform):
            # A constant 1.0 is appended to the inputs, so the last entry of
            # input_idx is expected to address a bias node.
            # NOTE(review): confirm config['h_input_idx'] includes that slot.
            inputs_with_bias = jnp.concatenate((inputs, jnp.ones((1,))), axis=0)
            nodes, weights = transform

            input_idx = config['h_input_idx']
            output_idx = config['h_output_idx']

            N = nodes.shape[0]
            vals = jnp.full((N,), 0.)

            # One synchronous propagation step; repeated h_activate_times so
            # activations can traverse multi-hop substrate paths.
            def body_func(i, values):
                # Re-clamp the input node values every iteration.
                values = values.at[input_idx].set(inputs_with_bias)
                # Row i of (values * weights.T) holds the weighted
                # contributions feeding node i: values[j] * weight[j -> i].
                nodes_ins = values * weights.T
                values = batch_agg(nodes_ins)  # z = agg(ins)
                # NOTE(review): h_nodes built by HyperNEAT.setup has a single
                # column (the node index), so nodes[:, 1] and nodes[:, 2] rely
                # on JAX's out-of-bounds index clamping and both resolve to
                # column 0 — i.e. "response"/"bias" become the node index.
                # Looks unintended; confirm against the NEAT gene node layout.
                values = values * nodes[:, 2] + nodes[:, 1]  # z = z * response + bias
                values = batch_act(values)  # z = act(z)
                return values

            vals = jax.lax.fori_loop(0, config['h_activate_times'], body_func, vals)
            return vals[output_idx]

        return forward
|
||||
@@ -1,2 +0,0 @@
|
||||
from .base import BaseSubstrate
|
||||
from .tools import analysis_substrate
|
||||
@@ -1,12 +0,0 @@
|
||||
import numpy as np
|
||||
|
||||
|
||||
class BaseSubstrate:
    """Minimal substrate: loads user-provided node coordinates into state.

    Subclasses may override ``setup`` to generate coordinates instead.
    """

    @staticmethod
    def setup(state, config):
        """Store input/output/hidden coordinates from *config* in *state*
        as float32 arrays under the same keys."""
        coor_keys = ('input_coors', 'output_coors', 'hidden_coors')
        coors = {k: np.asarray(config[k], dtype=np.float32) for k in coor_keys}
        return state.update(**coors)
|
||||
@@ -1,53 +0,0 @@
|
||||
from typing import Type
|
||||
|
||||
import numpy as np
|
||||
|
||||
from .base import BaseSubstrate
|
||||
|
||||
|
||||
def analysis_substrate(state):
    """Enumerate every candidate connection of the substrate.

    Connectivity pattern is input -> hidden, hidden -> hidden and
    hidden -> output (no direct input -> output links, so a substrate with
    zero hidden nodes yields an empty connection set).

    Args:
        state: must expose ``input_coors``, ``output_coors`` and
            ``hidden_coors`` arrays of shape (n, cd).

    Returns:
        input_idx, output_idx, hidden_idx: 1-D node-index arrays, numbered
            consecutively as inputs, then outputs, then hiddens.
        query_coors: (total_conns, 2*cd) float32 array; each row is the
            source coordinates concatenated with the target coordinates
            (the CPPN query for that connection).
        correspond_keys: (total_conns, 2) int32 array of
            (source_idx, target_idx) pairs, row-aligned with query_coors.
    """
    cd = state.input_coors.shape[1]  # coordinate dimensions
    si = state.input_coors.shape[0]  # input coordinate size
    so = state.output_coors.shape[0]  # output coordinate size
    sh = state.hidden_coors.shape[0]  # hidden coordinate size

    input_idx = np.arange(si)
    output_idx = np.arange(si, si + so)
    hidden_idx = np.arange(si + so, si + so + sh)

    total_conns = si * sh + sh * sh + sh * so
    # Explicit dtypes (fix: the originals defaulted to float64): coordinates
    # stay float32 to match the substrate arrays, and keys are integral node
    # indices, as consumed downstream via jnp.asarray(..., jnp.int32).
    query_coors = np.zeros((total_conns, cd * 2), dtype=np.float32)
    correspond_keys = np.zeros((total_conns, 2), dtype=np.int32)

    # connect input to hidden
    aux_coors, aux_keys = cartesian_product(input_idx, hidden_idx, state.input_coors, state.hidden_coors)
    query_coors[0: si * sh, :] = aux_coors
    correspond_keys[0: si * sh, :] = aux_keys

    # connect hidden to hidden
    aux_coors, aux_keys = cartesian_product(hidden_idx, hidden_idx, state.hidden_coors, state.hidden_coors)
    query_coors[si * sh: si * sh + sh * sh, :] = aux_coors
    correspond_keys[si * sh: si * sh + sh * sh, :] = aux_keys

    # connect hidden to output
    aux_coors, aux_keys = cartesian_product(hidden_idx, output_idx, state.hidden_coors, state.output_coors)
    query_coors[si * sh + sh * sh:, :] = aux_coors
    correspond_keys[si * sh + sh * sh:, :] = aux_keys

    return input_idx, output_idx, hidden_idx, query_coors, correspond_keys


def cartesian_product(keys1, keys2, coors1, coors2):
    """All (keys1[i], keys2[j]) pairs with their concatenated coordinates.

    Returns:
        new_coors: (len1*len2, coors1.shape[1] + coors2.shape[1]) array;
            row i*len2 + j is coors1[i] followed by coors2[j].
        correspond_keys: (len1*len2, 2) array of (keys1[i], keys2[j]) pairs,
            row-aligned with new_coors.
    """
    len1 = keys1.shape[0]
    len2 = keys2.shape[0]

    # Row-major pairing: repeat each left entry len2 times, tile the right
    # side len1 times, then stitch the halves column-wise.
    repeated_coors1 = np.repeat(coors1, len2, axis=0)
    repeated_keys1 = np.repeat(keys1, len2)

    tiled_coors2 = np.tile(coors2, (len1, 1))
    tiled_keys2 = np.tile(keys2, len1)

    new_coors = np.concatenate((repeated_coors1, tiled_coors2), axis=1)
    correspond_keys = np.column_stack((repeated_keys1, tiled_keys2))

    return new_coors, correspond_keys
|
||||
Reference in New Issue
Block a user