Skip to content

Commit fb62ff0

Browse files
author
Chris Sullivan
committed
Added reproduction to Genome via Genome::operator() such that
child = mother(father), where mother is always the more fit genome. Crossover produces a child that is identical in structure to the more fit parent (mother), except where structure (with the same innovation history) exists in both parents; in that case the genes are taken randomly from each parent. Need to finish the test, but a preliminary test was created. Also adding a performance test of the fixed-topology feed-forward network for comparison.
1 parent f5cb229 commit fb62ff0

File tree

6 files changed

+127
-22
lines changed

6 files changed

+127
-22
lines changed

feedforward_perf.cc

+31
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
#include "Neural.hh"
2+
#include <chrono>
3+
#include <iostream>
4+
#include "Timer.hh"
5+
6+
int main() {
7+
8+
auto nTrials = 100u;
9+
double tperformance = 0.0;
10+
11+
// Time the network evaluation
12+
for (auto i=0u; i < nTrials; i++)
13+
{
14+
Entendre::FeedForward network({10,20,10});
15+
16+
Timer tbuild([&tperformance](int elapsed) {
17+
tperformance+=elapsed;
18+
});
19+
20+
network.Feed({0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5});
21+
}
22+
std::cout << std:: endl
23+
<< "Average time to evaluate network: "
24+
<< tperformance/nTrials/1.0e6 << " ms\n"
25+
<< std::endl;
26+
tperformance = 0.0;
27+
28+
29+
return 0;
30+
31+
}

libNeat/include/Genome.hh

+9-1
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
#include "Random.hh"
55

66
namespace Constants {
7+
const float Match = 0.5;
78
const float PerturbWeight = 0.9;
89
const float StepSize = 0.1;
910
const float ResetWeightScale = 4.0;
@@ -13,12 +14,19 @@ struct Gene {
1314
bool enabled;
1415
unsigned long innovation_number;
1516
Connection link;
17+
bool operator==(const Gene& other) {
18+
return
19+
(link.origin == other.link.origin) &&
20+
(link.dest == other.link.dest) &&
21+
(link.type == other.link.type);
22+
}
1623
};
1724

1825
class Genome : public uses_random_numbers {
1926
public:
27+
Genome operator()(const Genome& father);
2028
operator NeuralNet() const;
21-
29+
void operator=(const Genome&);
2230
Genome& AddNode(NodeType type);
2331
Genome& AddGene(unsigned int origin, unsigned int dest, ConnectionType type,
2432
bool status, double weight);

libNeat/src/Genome.cc

+60
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,70 @@ Genome::operator NeuralNet() const {
1414
return net;
1515
}
1616

17+
// Copy assignment: duplicate the genome's node and gene lists and share
// the right-hand side's random-number generator.
// NOTE(review): conventionally this would return `Genome&` for chained
// assignment; the header declares it as void, so the signature is kept.
void Genome::operator=(const Genome& rhs) {
  nodes = rhs.nodes;
  genes = rhs.genes;
  generator = rhs.generator;
}
22+
23+
// Genome mating
24+
Genome Genome::operator()(const Genome& father) {
25+
// Implicit assumption: if there is a fitness difference
26+
// then the mother must be the more fit genome. i.e.
27+
// child = mother(father) s.t. fitness(mother) > fitness(father)
28+
auto& mother = *this;
29+
Genome child;
30+
31+
auto paternal = father.genes.begin();
32+
33+
// Mother is most fit, so we will not take
34+
// any structure above and beyond what is in
35+
// the mother. Thus, when we run out of mother
36+
// genes to iterate over, we are done.
37+
for (auto& maternal : mother.genes) {
38+
const Gene* candidate = nullptr;
39+
40+
if (paternal != father.genes.end()){
41+
if (maternal.innovation_number == paternal->innovation_number) {
42+
candidate = (random()<Constants::Match) ? &maternal : &(*paternal);
43+
}
44+
else {
45+
candidate = &maternal;
46+
}
47+
paternal++;
48+
} else {
49+
// no paternal genes left
50+
candidate = &maternal;
51+
}
52+
53+
//if (!candidate) { continue; }
54+
55+
// does child already have a gene like the candidate?
56+
bool unique = true;
57+
for (auto& gene : child.genes) {
58+
if (gene == *candidate) {
59+
unique = false; break;
60+
}
61+
}
62+
63+
// add gene to child
64+
if (unique) {
65+
child.genes.push_back(*candidate);
66+
}
67+
}
68+
69+
// copy in nodes from more fit parent (mother)
70+
child.nodes = mother.nodes;
71+
72+
// neshima
73+
return child;
74+
}
75+
1776
Genome& Genome::AddNode(NodeType type) {
1877
nodes.emplace_back(type);
1978
return *this;
2079
}
80+
2181
Genome& Genome::AddGene(unsigned int origin, unsigned int dest, ConnectionType type,
2282
bool status, double weight) {
2383

proto.cc

-15
This file was deleted.

tests/GenomeTests.cc

+23
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
#include <gtest/gtest.h>
2+
#include "Genome.hh"
3+
#include "NeuralNet.hh"
4+
#include "Timer.hh"
5+
6+
TEST(Genome, CompareInnovation) {
  // Minimal genome: bias, input, hidden, and output nodes wired with
  // four forward connections sharing one innovation history.
  auto mother = Genome()
                    .AddNode(NodeType::Bias)
                    .AddNode(NodeType::Input)
                    .AddNode(NodeType::Hidden)
                    .AddNode(NodeType::Output)
                    .AddGene(0, 3, ConnectionType::Normal, true, 1.)
                    .AddGene(1, 3, ConnectionType::Normal, true, 1.)
                    .AddGene(1, 2, ConnectionType::Normal, true, 1.)
                    .AddGene(2, 3, ConnectionType::Normal, true, 1.);
  mother.set_generator(std::make_shared<Uniform>(0, 1));

  // Copy the shared history into the father, then give each parent one
  // gene the other lacks.
  auto father = mother;
  mother.AddGene(3, 2, ConnectionType::Recurrent, true, 1.);
  father.AddGene(0, 2, ConnectionType::Normal, true, 1.);

  // Crossover: mother is treated as the more fit parent.
  // TODO(review): preliminary test only -- no assertions are made about
  // the child's genes yet.
  auto child = mother(father);
}

tests/NetworkTests.cc

+4-6
Original file line numberDiff line numberDiff line change
@@ -97,9 +97,8 @@ TEST(NeuralNet,EvaluateLargeNetwork){
9797
});
9898
auto net = NeuralNet(genome);
9999
}
100-
std::cout << std:: endl
101-
<< "Average time to construct network: "
102-
<< tperformance/nTrials/1.0e6 << " ms\n"
100+
std::cout << " Average time to construct network: "
101+
<< tperformance/nTrials/1.0e6 << " ms"
103102
<< std::endl;
104103
tperformance = 0.0;
105104

@@ -116,9 +115,8 @@ TEST(NeuralNet,EvaluateLargeNetwork){
116115

117116
auto result = net.evaluate({0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5});
118117
}
119-
std::cout << std:: endl
120-
<< "Average time to evaluate network: "
121-
<< tperformance/nTrials/1.0e6 << " ms\n"
118+
std::cout << " Average time to evaluate network: "
119+
<< tperformance/nTrials/1.0e6 << " ms"
122120
<< std::endl;
123121

124122

0 commit comments

Comments
 (0)