Add repr for genome and gene.

Add an ipynb test verifying that adding a node or a connection does not change the network's output.
This commit is contained in:
wls2002
2024-06-09 22:32:29 +08:00
parent 52e5d603f5
commit dfc8f9198e
11 changed files with 127 additions and 10 deletions

View File

@@ -40,3 +40,6 @@ class BaseGene(StatefulBaseClass):
@property
def length(self):
    """Total attribute count of this gene: fixed attributes plus custom ones."""
    attr_groups = (self.fixed_attrs, self.custom_attrs)
    return sum(len(group) for group in attr_groups)
def repr(self, state, gene, precision=2):
    """Format ``gene`` as a human-readable one-line string.

    The base class has no generic formatting; concrete gene classes
    must override this method.
    """
    raise NotImplementedError

View File

@@ -22,3 +22,12 @@ class BaseConnGene(BaseGene):
jax.vmap(self.forward, in_axes=(None, None, 0))(state, attrs, batch_inputs),
attrs,
)
def repr(self, state, conn, precision=2, idx_width=3, func_width=8):
    """One-line summary of a connection: class name plus the input and
    output node indices (first two entries of ``conn``), left-padded to
    ``idx_width`` columns. ``precision`` and ``func_width`` are accepted
    for interface consistency with subclass overrides."""
    src, dst = (int(v) for v in conn[:2])
    template = "{}(in: {:<{idx_width}}, out: {:<{idx_width}})"
    return template.format(type(self).__name__, src, dst, idx_width=idx_width)

View File

@@ -60,3 +60,19 @@ class DefaultConnGene(BaseConnGene):
def forward(self, state, attrs, inputs):
    """Scale ``inputs`` by the connection weight stored in ``attrs[0]``."""
    return attrs[0] * inputs
def repr(self, state, conn, precision=2, idx_width=3, func_width=8):
    """Summarize a weighted connection: input/output indices plus the
    weight rounded to ``precision`` decimals, each left-padded for
    column alignment."""
    src, dst, raw_weight = conn
    src = int(src)
    dst = int(dst)
    weight = round(float(raw_weight), precision)
    template = (
        "{}(in: {:<{idx_width}}, out: {:<{idx_width}}, "
        "weight: {:<{float_width}})"
    )
    return template.format(
        type(self).__name__,
        src,
        dst,
        weight,
        idx_width=idx_width,
        # weight column: digits of precision + room for "-0."
        float_width=precision + 3,
    )

View File

@@ -39,3 +39,11 @@ class BaseNodeGene(BaseGene):
),
attrs,
)
def repr(self, state, node, precision=2, idx_width=3, func_width=8):
    """One-line summary of a node: class name and its integer index
    (first entry of ``node``), left-padded to ``idx_width`` columns."""
    node_idx = int(node[0])
    return "{}(idx={:<{idx_width}})".format(
        type(self).__name__, node_idx, idx_width=idx_width
    )

View File

@@ -122,3 +122,28 @@ class DefaultNodeGene(BaseNodeGene):
)
return z
def repr(self, state, node, precision=2, idx_width=3, func_width=8):
    """Describe a node: index, bias and response rounded to ``precision``
    decimals, and the names of its aggregation and activation functions,
    each left-padded to the given column widths.

    An activation index of -1 is a sentinel meaning the identity
    activation, which is not an entry of ``self.activation_options``.
    """
    raw_idx, raw_bias, raw_res, raw_agg, raw_act = node
    node_idx = int(raw_idx)
    bias = round(float(raw_bias), precision)
    response = round(float(raw_res), precision)
    agg_i = int(raw_agg)
    act_i = int(raw_act)
    # -1 selects the identity activation rather than indexing the options list.
    act_func = Act.identity if act_i == -1 else self.activation_options[act_i]
    float_width = precision + 3
    template = (
        "{}(idx={:<{idx_width}}, bias={:<{float_width}}, "
        "response={:<{float_width}}, aggregation={:<{func_width}}, "
        "activation={:<{func_width}})"
    )
    return template.format(
        type(self).__name__,
        node_idx,
        bias,
        response,
        self.aggregation_options[agg_i].__name__,
        act_func.__name__,
        idx_width=idx_width,
        float_width=float_width,
        func_width=func_width,
    )

View File

@@ -98,3 +98,26 @@ class NodeGeneWithoutResponse(BaseNodeGene):
)
return z
def repr(self, state, node, precision=2, idx_width=3, func_width=8):
    """Describe a response-less node: index, bias rounded to ``precision``
    decimals, and the names of its aggregation and activation functions,
    each left-padded to the given column widths.

    An activation index of -1 is a sentinel meaning the identity
    activation, which is not an entry of ``self.activation_options``.
    """
    raw_idx, raw_bias, raw_agg, raw_act = node
    node_idx = int(raw_idx)
    bias = round(float(raw_bias), precision)
    agg_i = int(raw_agg)
    act_i = int(raw_act)
    # -1 selects the identity activation rather than indexing the options list.
    act_func = Act.identity if act_i == -1 else self.activation_options[act_i]
    template = (
        "{}(idx={:<{idx_width}}, bias={:<{float_width}}, "
        "aggregation={:<{func_width}}, activation={:<{func_width}})"
    )
    return template.format(
        type(self).__name__,
        node_idx,
        bias,
        self.aggregation_options[agg_i].__name__,
        act_func.__name__,
        idx_width=idx_width,
        float_width=precision + 3,
        func_width=func_width,
    )

View File

@@ -25,3 +25,8 @@ class KANNode(BaseNodeGene):
def forward(self, state, attrs, inputs, is_output_node=False):
    """Aggregate incoming values with ``Agg.sum``.

    ``attrs`` and ``is_output_node`` are accepted for interface
    compatibility with other node genes but are unused here — a KAN
    node only sums its inputs.
    """
    return Agg.sum(inputs)
def repr(self, state, node, precision=2, idx_width=3, func_width=8):
    """One-line summary of a KAN node: class name and integer index.

    ``idx_width`` and ``func_width`` are accepted (with the same
    defaults as the other node genes' ``repr`` methods) so all gene
    ``repr`` signatures stay interchangeable; ``idx_width`` left-pads
    the index so rows align when many genes are printed together.
    """
    idx = int(node[0])
    return "{}(idx: {:<{idx_width}})".format(
        self.__class__.__name__, idx, idx_width=idx_width
    )