Add CustomFuncFit problem class; add related examples

This commit is contained in:
root
2024-07-11 18:32:08 +08:00
parent 3cb5fbf581
commit be6a67d7e2
15 changed files with 241 additions and 437 deletions

View File

@@ -76,7 +76,7 @@ class HyperNEAT(BaseAlgorithm):
h_nodes, h_conns = self.substrate.make_nodes(
query_res
), self.substrate.make_conn(query_res)
), self.substrate.make_conns(query_res)
return self.hyper_genome.transform(state, h_nodes, h_conns)

View File

@@ -6,7 +6,7 @@ class BaseSubstrate(StatefulBaseClass):
def make_nodes(self, query_res):
raise NotImplementedError
def make_conn(self, query_res):
def make_conns(self, query_res):
raise NotImplementedError
@property

View File

@@ -1,5 +1,7 @@
import jax.numpy as jnp
from . import BaseSubstrate
from jax import vmap, numpy as jnp
from .base import BaseSubstrate
from tensorneat.genome.utils import set_conn_attrs
class DefaultSubstrate(BaseSubstrate):
@@ -13,8 +15,9 @@ class DefaultSubstrate(BaseSubstrate):
def make_nodes(self, query_res):
return self.nodes
def make_conn(self, query_res):
return self.conns.at[:, 2:].set(query_res) # change weight
def make_conns(self, query_res):
# change weight of conns
return vmap(set_conn_attrs)(self.conns, query_res)
@property
def query_coors(self):

View File

@@ -31,6 +31,7 @@ name2sympy = {
"maxabs": SympyMaxabs,
"mean": SympyMean,
"clip": SympyClip,
"square": SympySquare,
}

View File

@@ -69,6 +69,10 @@ class Act:
z = jnp.clip(z, -10, 10)
return jnp.exp(z)
@staticmethod
def square(z):
return jnp.pow(z, 2)
@staticmethod
def abs(z):
z = jnp.clip(z, -1, 1)

View File

@@ -184,6 +184,12 @@ class SympyExp(sp.Function):
return rf"\mathrm{{exp}}\left({sp.latex(self.args[0])}\right)"
class SympySquare(sp.Function):
@classmethod
def eval(cls, z):
return sp.Pow(z, 2)
class SympyAbs(sp.Function):
@classmethod
def eval(cls, z):

View File

@@ -17,6 +17,8 @@ class DefaultConn(BaseConn):
weight_mutate_power: float = 0.15,
weight_mutate_rate: float = 0.2,
weight_replace_rate: float = 0.015,
weight_lower_bound: float = -5.0,
weight_upper_bound: float = 5.0,
):
super().__init__()
self.weight_init_mean = weight_init_mean
@@ -24,6 +26,9 @@ class DefaultConn(BaseConn):
self.weight_mutate_power = weight_mutate_power
self.weight_mutate_rate = weight_mutate_rate
self.weight_replace_rate = weight_replace_rate
self.weight_lower_bound = weight_lower_bound
self.weight_upper_bound = weight_upper_bound
def new_zero_attrs(self, state):
return jnp.array([0.0]) # weight = 0
@@ -36,6 +41,7 @@ class DefaultConn(BaseConn):
jax.random.normal(randkey, ()) * self.weight_init_std
+ self.weight_init_mean
)
weight = jnp.clip(weight, self.weight_lower_bound, self.weight_upper_bound)
return jnp.array([weight])
def mutate(self, state, randkey, attrs):
@@ -49,7 +55,7 @@ class DefaultConn(BaseConn):
self.weight_mutate_rate,
self.weight_replace_rate,
)
weight = jnp.clip(weight, self.weight_lower_bound, self.weight_upper_bound)
return jnp.array([weight])
def distance(self, state, attrs1, attrs2):

View File

@@ -47,9 +47,9 @@ class BiasNode(BaseNode):
if isinstance(activation_options, Callable):
activation_options = [activation_options]
if len(aggregation_options) == 1 and aggregation_default is None:
if aggregation_default is None:
aggregation_default = aggregation_options[0]
if len(activation_options) == 1 and activation_default is None:
if activation_default is None:
activation_default = activation_options[0]
self.bias_init_mean = bias_init_mean

View File

@@ -52,9 +52,9 @@ class DefaultNode(BaseNode):
if isinstance(activation_options, Callable):
activation_options = [activation_options]
if len(aggregation_options) == 1 and aggregation_default is None:
if aggregation_default is None:
aggregation_default = aggregation_options[0]
if len(activation_options) == 1 and activation_default is None:
if activation_default is None:
activation_default = activation_options[0]
self.bias_init_mean = bias_init_mean

View File

@@ -1,3 +1,4 @@
from .func_fit import FuncFit
from .xor import XOR
from .xor3d import XOR3d
from .custom import CustomFuncFit

View File

@@ -0,0 +1,119 @@
from typing import Callable, Union, List, Tuple, Sequence
import jax
from jax import vmap, Array, numpy as jnp
import numpy as np
from .func_fit import FuncFit
class CustomFuncFit(FuncFit):
    """Function-fitting problem for a user-supplied target function.

    The dataset of (input, output) pairs is generated once at construction
    time, either by uniform random sampling inside the bounding box
    (``method="sample"``) or by a regular grid (``method="grid"``).

    Args:
        func: Target function mapping a 1-D input vector to an output;
            it is vmapped over the generated inputs.
        low_bounds: Per-dimension lower bounds of the input domain.
        upper_bounds: Per-dimension upper bounds; same shape as ``low_bounds``.
        method: ``"sample"`` (uniform random points) or ``"grid"``.
        num_samples: Number of random points when ``method="sample"``.
        step_size: Per-dimension grid step; required when ``method="grid"``.

    Raises:
        ValueError: If ``func`` cannot evaluate a bounds-shaped input, or
            ``method`` is unknown.
    """

    def __init__(
        self,
        func: Callable,
        low_bounds: Union[List, Tuple, Array],
        upper_bounds: Union[List, Tuple, Array],
        method: str = "sample",
        num_samples: int = 100,
        step_size: Array = None,
        *args,
        **kwargs,
    ):
        # Normalize plain Python sequences to float32 numpy arrays.
        if isinstance(low_bounds, (list, tuple)):
            low_bounds = np.array(low_bounds, dtype=np.float32)
        if isinstance(upper_bounds, (list, tuple)):
            upper_bounds = np.array(upper_bounds, dtype=np.float32)

        # Fail fast with a clear message if func cannot handle an input
        # shaped like the bounds vector (result itself is not needed here).
        try:
            func(low_bounds)
        except Exception as e:
            raise ValueError(f"func(low_bounds) raised an exception: {e}") from e

        assert (
            low_bounds.shape == upper_bounds.shape
        ), "low_bounds and upper_bounds must have the same shape"
        assert method in {"sample", "grid"}, f"Unknown method: {method}"

        self.func = func
        self.low_bounds = low_bounds
        self.upper_bounds = upper_bounds
        self.method = method
        self.num_samples = num_samples
        self.step_size = step_size
        # Dataset must exist before the base class initializes, since
        # FuncFit may query the input/output properties during setup.
        self.generate_dataset()
        super().__init__(*args, **kwargs)

    def generate_dataset(self):
        """Build ``data_inputs``/``data_outputs`` according to ``method``."""
        if self.method == "sample":
            assert (
                self.num_samples > 0
            ), f"num_samples must be positive, got {self.num_samples}"
            # np.random.uniform broadcasts per-dimension bounds across
            # columns, replacing the per-dimension sampling loop.
            inputs = np.random.uniform(
                low=self.low_bounds,
                high=self.upper_bounds,
                size=(self.num_samples, self.low_bounds.shape[0]),
            ).astype(np.float32)
        elif self.method == "grid":
            assert (
                self.step_size is not None
            ), "step_size must be provided when method is 'grid'"
            assert (
                self.step_size.shape == self.low_bounds.shape
            ), "step_size must have the same shape as low_bounds"
            assert np.all(self.step_size > 0), "step_size must be positive"
            # Seed with a dummy column, cross-product each dimension's
            # 1-D grid into it, then drop the dummy column at the end.
            inputs = np.zeros((1, 1))
            for i in range(self.low_bounds.shape[0]):
                new_col = np.arange(
                    self.low_bounds[i], self.upper_bounds[i], self.step_size[i]
                )
                inputs = cartesian_product(inputs, new_col[:, None])
            inputs = inputs[:, 1:]
        else:
            raise ValueError(f"Unknown method: {self.method}")

        outputs = vmap(self.func)(inputs)
        self.data_inputs = jnp.array(inputs)
        self.data_outputs = jnp.array(outputs)

    @property
    def inputs(self):
        # Generated input points, shape (n_points, n_dims).
        return self.data_inputs

    @property
    def targets(self):
        # Target outputs: func evaluated at each input point.
        return self.data_outputs

    @property
    def input_shape(self):
        return self.data_inputs.shape

    @property
    def output_shape(self):
        return self.data_outputs.shape
def cartesian_product(arr1, arr2):
    """Return the row-wise Cartesian product of two arrays.

    Every row of ``arr1`` is paired with every row of ``arr2``; the paired
    rows are concatenated along the column axis. The result has
    ``len(arr1) * len(arr2)`` rows, ordered with ``arr1`` varying slowest.
    """
    assert (
        arr1.ndim == arr2.ndim
    ), "arr1 and arr2 must have the same number of dimensions"
    assert arr1.ndim <= 2, "arr1 and arr2 must have at most 2 dimensions"

    n_left, n_right = arr1.shape[0], arr2.shape[0]
    # Repeat each left row n_right times, cycle the right rows n_left times,
    # then glue the aligned halves together column-wise.
    left = np.repeat(arr1, n_right, axis=0)
    right = np.tile(arr2, (n_left, 1))
    return np.concatenate((left, right), axis=1)