Restructure layers
- only one kind of dense layer
- preconditioning layers for scaling
phannebohm committed Jul 8, 2024
1 parent b6cd69f commit bd37fb4
Showing 21 changed files with 1,959 additions and 214 deletions.
42 changes: 21 additions & 21 deletions NeuralNetwork/Examples/HelloWorld.mo
@@ -2,11 +2,11 @@ within NeuralNetwork.Examples;

model HelloWorld
extends Modelica.Icons.Example;
Modelica.Blocks.Sources.Sine sine(f = 1) annotation(
Placement(transformation(origin = {-60, 0}, extent = {{-20, -20}, {20, 20}})));
Utilities.SimpleEquation eq annotation(
Placement(transformation(origin = {40, 30}, extent = {{-20, -20}, {20, 20}})));
Utilities.PolynomNetwork nn(numInputs = 1, numOutputs = 1) annotation(
Utilities.PolynomNetwork nn annotation(
Placement(transformation(origin = {40, -30}, extent = {{-20, -20}, {20, 20}})));
Real reference = eq.y;
Real prediction = nn.y[1];
@@ -15,24 +15,24 @@ equation
Line(points = {{-38, 0}, {-20, 0}, {-20, 30}, {16, 30}}, color = {0, 0, 127}));
connect(sine.y, nn.u[1]) annotation(
Line(points = {{-38, 0}, {-20, 0}, {-20, -30}, {16, -30}}, color = {0, 0, 127}));
annotation(
Documentation(info = "<html><head></head><body>
<p>Example showcasing how to replace the block
<a href=\"modelica://NeuralNetwork.Examples.Utilities.SimpleEquation\">SimpleEquation</a>
</p>
<p>y = u^2 + 0.5u - 2</p>
<p>with the artificial neural network surrogate
<a href=\"modelica://NeuralNetwork.Examples.Utilities.PolynomNetwork\">PolynomNetwork</a>.
</p>
<p>
A quadratic polynomial in one variable is approximated with a dense feed-forward neural network.
The network was generated with the Python script from the notebook
<a href=\"https://github.com/AMIT-HSBI/NeuralNetwork/blob/main/Example/HelloWorld.ipynb\">HelloWorld.ipynb</a>.
</p>
<h2>Expected Results</h2>
<p>Compare simulation results for variables <b>reference</b> and <b>prediction</b>.
</p>
<img style='width: 100%' src=\"modelica://NeuralNetwork/Resources/Images/HelloWorld.png\" alt=\"HelloWorld.png\">
</body></html>"),
experiment(StartTime = 0, StopTime = 1, Tolerance = 1e-06, Interval = 0.002));
end HelloWorld;
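A quick way to exercise the example is to wrap it in a model that asserts on the deviation between the two outputs. This sketch is not part of the commit, and the 0.1 bound is an arbitrary illustrative tolerance, not a value from the library:

// Illustrative only: reuses HelloWorld and stops the simulation if the
// surrogate drifts too far from the analytic reference. The 0.1 tolerance
// is a made-up bound for demonstration purposes.
model HelloWorldCheck
  extends NeuralNetwork.Examples.HelloWorld;
equation
  assert(abs(prediction - reference) < 0.1,
    "PolynomNetwork surrogate deviates from SimpleEquation by more than 0.1");
end HelloWorldCheck;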
615 changes: 611 additions & 4 deletions NeuralNetwork/Examples/NARX.mo

Large diffs are not rendered by default.

68 changes: 30 additions & 38 deletions NeuralNetwork/Examples/Utilities/NARX_Network.mo
@@ -3,10 +3,18 @@ within NeuralNetwork.Examples.Utilities;
block NARX_Network
// This represents a three-layer neural network with a given time delay

NeuralNetwork.Layer.Input layer_1(
bias = {
-0.00000677, 0.20556270, -0.00000132, -0.00000881, -0.00002842, -0.02957853, -0.02369246, -0.00018398, 0.57354265, -0.02597237, -0.00040900, -0.00000582, -0.00000060, 0.46941766, -0.00000007, -0.00001206, -0.00132889, -0.06358027, -0.00029233, -0.00006202, 0.20907070, -0.00067263, 0.23418750, 0.49982220, -0.02735407, 0.17065620, -0.02715359, -0.00007624, -0.00000116, -0.02525282, -0.73274541, 0.00625872
extends NeuralNetwork.Networks.Interfaces.Network(u = layer_scale.u, y = layer_3.y);

NeuralNetwork.Layer.Precondition.Scale layer_scale(
min = {
8.00000000, 8.00000000, 8.00000000, 8.00000000, 8.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 3.69310190, 3.69310190, 3.69310190, 3.69634820
},
max = {
12.00000000, 12.00000000, 12.00000000, 12.00000000, 12.00000000, 2.00000000, 2.00000000, 2.00000000, 2.00000000, 2.00000000, 11.79242300, 11.78412200, 11.77998500, 11.79961200
}
) annotation(Placement(transformation(origin = {-110, 0}, extent = {{-30, -30}, {30, 30}})));

NeuralNetwork.Layer.Dense layer_1(
weights = [
0.00000000, 0.00000000, 0.00000000, 0.00000000, -0.00000000, -0.00000000, 0.00000000, -0.00000000, 0.00000000, 0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.00000000;
0.01148210, 0.02394460, 0.03269602, 0.04737446, 0.04980696, 0.03527141, -0.01759291, -0.07528974, -0.00083422, -0.03597534, 0.11129925, 0.05073337, 0.04893982, 0.08150365;
@@ -41,21 +49,13 @@ block NARX_Network
0.10835370, -0.06687686, -0.08516762, 0.00163639, -0.01949299, 0.27197844, 0.22464167, 0.15060697, 0.49622747, 0.03319209, 0.79951191, 0.15807885, 0.01158971, 0.06415213;
-0.00899850, -0.00043999, 0.05325294, 0.11440736, 0.16661036, 0.04696987, 0.07134484, 0.05588490, 0.12287147, -0.13335720, -0.04430202, -0.07544848, -0.03252205, -0.03692830
],
redeclare function f = NeuralNetwork.ActivationFunctions.ReLu,
numInputs = 14,
numNeurons = 32,
scale = true,
max = {12.00000000, 12.00000000, 12.00000000, 12.00000000, 12.00000000, 2.00000000, 2.00000000, 2.00000000, 2.00000000, 2.00000000, 11.79242300, 11.78412200, 11.77998500, 11.79961200},
min = {8.00000000, 8.00000000, 8.00000000, 8.00000000, 8.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 3.69310190, 3.69310190, 3.69310190, 3.69634820},
standardization = false,
mean = zeros(14),
std = ones(14)
) annotation(
Placement(transformation(origin = {-60, 0}, extent = {{-30, -30}, {30, 30}})));
NeuralNetwork.Layer.Hidden layer_2(
bias = {
-0.01915434, 0.54542047, 0.38827929, 0.43106887, 0.27880254, -0.02492376, 0.51309705, -0.00801731, -0.03952867, -0.14912339, -0.01643608, 0.15713434, 0.52502894, 0.03786519, -0.05118470, -0.02619821, 0.09674487, 0.59679657, 0.48414156, 0.42411724, 0.39085755, 0.43997148, -0.00006288, 0.39918363, -0.02014719, 0.27829495, 0.46513379, 0.40009546, 0.07611608, 0.02437924, -0.03650025, 0.42580885
-0.00000677, 0.20556270, -0.00000132, -0.00000881, -0.00002842, -0.02957853, -0.02369246, -0.00018398, 0.57354265, -0.02597237, -0.00040900, -0.00000582, -0.00000060, 0.46941766, -0.00000007, -0.00001206, -0.00132889, -0.06358027, -0.00029233, -0.00006202, 0.20907070, -0.00067263, 0.23418750, 0.49982220, -0.02735407, 0.17065620, -0.02715359, -0.00007624, -0.00000116, -0.02525282, -0.73274541, 0.00625872
},
redeclare function f = NeuralNetwork.ActivationFunctions.ReLu
) annotation(Placement(transformation(origin = {-32, 0}, extent = {{-30, -30}, {30, 30}})));

NeuralNetwork.Layer.Dense layer_2(
weights = [
0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.00000000, 0.00000000, -0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000, 0.00000000, -0.00000000, 0.00000000, -0.00000000, -0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000;
0.00000000, 0.02033915, -0.00000000, -0.00000000, 0.00000000, -0.00000000, -0.02677462, -0.00000000, -0.03843058, -0.00000000, 0.00000000, -0.00000000, 0.00000000, 0.03960535, -0.00000000, -0.00000000, -0.00000000, 0.00000000, -0.00000000, 0.00000000, 0.01462730, -0.00000000, 0.10641428, 0.05693106, 0.00000000, 0.01441677, -0.00000000, 0.00000000, -0.00000000, 0.00000000, 0.09148402, 0.02076523;
@@ -90,34 +90,26 @@ block NARX_Network
0.00000000, 0.00000000, 0.00000000, -0.00000000, -0.00000000, 0.00000000, -0.00000000, 0.00000000, -0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000, 0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.00000000, 0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.00000000, -0.00000000, 0.00000000, 0.00000000, -0.00000000, 0.00000000, -0.00000000;
-0.00000000, 0.01053307, 0.00000000, 0.00000000, -0.00000000, -0.00000000, -0.01384466, -0.00000000, -0.02080311, 0.00000000, -0.00000000, -0.00000000, -0.00000000, 0.02053364, -0.00000000, 0.00000000, 0.00000000, 0.00000000, -0.00000000, 0.00000000, 0.00747495, 0.00000000, 0.05692909, 0.03067375, 0.00000000, 0.00651125, 0.00000000, 0.00000000, -0.00000000, -0.00000000, 0.04839816, 0.01195795
],
redeclare function f = NeuralNetwork.ActivationFunctions.ReLu,
numInputs = 32,
numNeurons = 32
) annotation(
Placement(transformation(origin = {-2, 0}, extent = {{-30, -30}, {30, 30}})));
NeuralNetwork.Layer.Output layer_3(
bias = {
0.38119549
-0.01915434, 0.54542047, 0.38827929, 0.43106887, 0.27880254, -0.02492376, 0.51309705, -0.00801731, -0.03952867, -0.14912339, -0.01643608, 0.15713434, 0.52502894, 0.03786519, -0.05118470, -0.02619821, 0.09674487, 0.59679657, 0.48414156, 0.42411724, 0.39085755, 0.43997148, -0.00006288, 0.39918363, -0.02014719, 0.27829495, 0.46513379, 0.40009546, 0.07611608, 0.02437924, -0.03650025, 0.42580885
},
redeclare function f = NeuralNetwork.ActivationFunctions.ReLu
) annotation(Placement(transformation(origin = {32, 0}, extent = {{-30, -30}, {30, 30}})));

NeuralNetwork.Layer.Dense layer_3(
weights = [
-0.20471907, 1.43495905, 1.30388153, 1.06038094, -8.76593208, -0.32832265, 1.56641781, -0.32070431, -0.15100922, -0.84189320, -0.05972841, -17.17411995, 1.18185210, -2.50973272, -0.14151685, -0.11161833, -5.19643021, 1.89217937, 1.26354647, 1.40430164, 1.09998930, 1.35513151, -0.31533393, 1.05400574, -0.14657310, -7.38095713, 0.95071983, 0.99007541, -9.05296898, -9.84933472, -0.33525518, 0.76771533
],
numInputs = 32,
numNeurons = 1,
rescale = false,
max = {1.0},
min = {0.0},
destandardization = false,
mean = {0.0},
std = {1.0}
) annotation(
Placement(transformation(origin = {62, 0}, extent = {{-30, -30}, {30, 30}})));
extends NeuralNetwork.Networks.Interfaces.Network(numInputs=14, numOutputs=1);
bias = {
0.38119549
},
redeclare function f = NeuralNetwork.ActivationFunctions.Id
) annotation(Placement(transformation(origin = {92, 0}, extent = {{-30, -30}, {30, 30}})));
equation
connect(u, layer_1.u);
connect(layer_scale.y, layer_1.u) annotation(
Line(points = {{-52, 0}, {-77, 0}}, color = {0, 0, 127}, thickness = 0.5));
connect(layer_1.y, layer_2.u) annotation(
Line(points = {{-41, 0}, {-22, 0}}, color = {0, 0, 127}, thickness = 0.5));
Line(points = {{-13, 0}, {11, 0}}, color = {0, 0, 127}, thickness = 0.5));
connect(layer_2.y, layer_3.u) annotation(
Line(points = {{16, 0}, {41, 0}}, color = {0, 0, 127}, thickness = 0.5));
connect(layer_3.y, y);
Line(points = {{51, 0}, {71, 0}}, color = {0, 0, 127}, thickness = 0.5));
end NARX_Network;
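The scaling that the old Input layer performed via its scale/min/max parameters is now delegated to a dedicated preconditioning block placed in front of the first Dense layer. The implementation of NeuralNetwork.Layer.Precondition.Scale is not shown in this view; the following is a minimal sketch of what such a min-max layer could look like, assuming it reuses the Layer interface and maps each input to [0, 1] (the actual block may use a different target range):

// Sketch only, not the file from this commit: per-input min-max scaling.
// The library's real Scale block may map to another range or guard against
// min[i] == max[i].
block ScaleSketch "Min-max preconditioning layer (illustrative)"
  extends NeuralNetwork.Layer.Interfaces.Layer(
    final numInputs = size(min, 1),
    final numNeurons = size(min, 1));
  parameter Real min[:] "Per-input minimum seen during training";
  parameter Real max[size(min, 1)] "Per-input maximum seen during training";
equation
  y = (u - min) ./ (max - min);  // each output component lies in [0, 1]
end ScaleSketch;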
27 changes: 11 additions & 16 deletions NeuralNetwork/Examples/Utilities/PolynomNetwork.mo
@@ -1,20 +1,17 @@
within NeuralNetwork.Examples.Utilities;

block PolynomNetwork "Neural Network approximating y = u*u + 0.5*u - 2.0 on interval [-1,1]"
extends NeuralNetwork.Networks.Interfaces.Network(numInputs = 1, numOutputs = 1);
Layer.Input inputLayer(
numInputs = 1,
numNeurons = 2,
extends NeuralNetwork.Networks.Interfaces.Network(final u = inputLayer.u, final y = outputLayer.y);
Layer.Dense inputLayer(
weights = layer_1_weights,
bias = layer_1_bias,
redeclare function f = NeuralNetwork.ActivationFunctions.Tanh
) annotation(
Placement(transformation(origin = {-66, 0}, extent = {{-30, -30}, {30, 30}})));
Layer.Output outputLayer(
numInputs = 2,
numNeurons = 1,
Layer.Dense outputLayer(
weights = layer_2_weights,
bias = layer_2_bias
bias = layer_2_bias,
redeclare function f = NeuralNetwork.ActivationFunctions.Id
) annotation(
Placement(transformation(origin = {50, 0}, extent = {{-30, -30}, {30, 30}})));

@@ -23,15 +20,13 @@ block PolynomNetwork "Neural Network approximating y = u*u + 0.5*u - 2.0 on inte
parameter Real[1,2] layer_2_weights = {{-2.25385, 1.40389}};
parameter Real[1] layer_2_bias = {0.60548};
equation
connect(u, inputLayer.u);
connect(inputLayer.y, outputLayer.u) annotation(
Line(points = {{-48, 0}, {30, 0}}, color = {0, 0, 127}, thickness = 0.5));
connect(outputLayer.y, y);
annotation(
Documentation(info = "<html><head></head><body>
<h2>Training</h2><p>
The neural network parameters were trained in
<a href=\"https://github.com/AMIT-HSBI/NeuralNetwork/blob/main/Example/HelloWorld.ipynb\">HelloWorld.ipynb</a>.
</p><p>Trained with TensorFlow on 8000 data points from the interval [-1,1].</p>
</body></html>"));
end PolynomNetwork;
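Since both layers are now plain Dense blocks, the surrogate's forward pass can be written as a single expression, y = W2 * tanh(W1*u + b1) + b2. The function below is illustrative only and not part of the library; W1, b1, W2, b2 stand for the layer_1_*/layer_2_* parameters declared above:

// Illustrative only: the composition of PolynomNetwork's two Dense layers.
function polynomForwardPass
  input Real u "Network input";
  input Real W1[2, 1] "layer_1_weights";
  input Real b1[2] "layer_1_bias";
  input Real W2[1, 2] "layer_2_weights";
  input Real b2[1] "layer_2_bias";
  output Real y "Network output";
protected
  Real h[2] "Hidden-layer activations";
algorithm
  h := tanh(W1[:, 1]*u + b1);  // Dense layer with Tanh activation
  y := W2[1, :]*h + b2[1];     // Dense layer with Id (linear) activation
end polynomForwardPass;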
2 changes: 1 addition & 1 deletion NeuralNetwork/Examples/Utilities/SimpleEquation.mo
@@ -5,5 +5,5 @@ block SimpleEquation
equation
y = u*u + 0.5*u - 2.0;
annotation(
Icon(graphics = {Text( textColor = {0, 0, 255}, extent = {{-98, 18}, {98, -18}}, textString = "y = u² + 0.5u - 2.0")}));
Icon(graphics = {Text(textColor = {0, 0, 255}, extent = {{-98, 18}, {98, -18}}, textString = "y = u² + 0.5u - 2.0")}));
end SimpleEquation;
2 changes: 1 addition & 1 deletion NeuralNetwork/Examples/Utilities/TimeDelay.mo
@@ -3,7 +3,7 @@ within NeuralNetwork.Examples.Utilities;
block TimeDelay
extends Modelica.Blocks.Interfaces.DiscreteMIMO;
// Store the initial delayed inputs here
parameter Real y_start[nout] = fill(0.0, nout) "Initial (already delayed) output signal";
parameter Real y_start[nout] = zeros(nout) "Initial (already delayed) output signal";
parameter Integer numInputs = 1 "Number of the system Inputs";
parameter Integer numOutputs = 1 "Number of the system Outputs";
// Delay of inputs: delay+1
11 changes: 11 additions & 0 deletions NeuralNetwork/Layer/Dense.mo
@@ -0,0 +1,11 @@
within NeuralNetwork.Layer;

block Dense "Block for a dense neural network layer"
extends Interfaces.Layer(final numInputs = size(weights, 2), final numNeurons = size(bias, 1));

parameter Real weights[:, :] "Weight table of a fully connected layer";
parameter Real bias[:] "Bias table of a fully connected layer";
replaceable function f = NeuralNetwork.ActivationFunctions.ActivationFunction "Activation function of the layer";
equation
y = f(weights * u + bias);
end Dense;
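This single block replaces the deleted Input, Hidden and Output layers; the layer topology is inferred from the parameter dimensions and the activation is chosen via the replaceable function. A minimal sketch of a two-layer network built from it, following the pattern PolynomNetwork uses above (the weight and bias values are made up for illustration):

// Illustrative only: made-up weights and biases, activations taken from the
// functions used elsewhere in this commit (ReLu and Id).
block TwoLayerSketch
  extends NeuralNetwork.Networks.Interfaces.Network(final u = hidden.u, final y = outLayer.y);
  NeuralNetwork.Layer.Dense hidden(
    weights = [0.5, -1.0; 2.0, 0.25],  // 2 neurons, 2 inputs
    bias = {0.1, -0.2},
    redeclare function f = NeuralNetwork.ActivationFunctions.ReLu);
  NeuralNetwork.Layer.Dense outLayer(
    weights = [1.0, -0.5],             // 1 neuron, 2 inputs
    bias = {0.0},
    redeclare function f = NeuralNetwork.ActivationFunctions.Id);
equation
  connect(hidden.y, outLayer.u);
end TwoLayerSketch;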
16 changes: 0 additions & 16 deletions NeuralNetwork/Layer/Hidden.mo

This file was deleted.

54 changes: 0 additions & 54 deletions NeuralNetwork/Layer/Input.mo

This file was deleted.

2 changes: 0 additions & 2 deletions NeuralNetwork/Layer/Interfaces/Layer.mo
@@ -2,8 +2,6 @@ within NeuralNetwork.Layer.Interfaces;

partial model Layer
// Topology of the neural network
parameter Real bias[:] "Bias table of the output-layer";
parameter Real weights[:, :] "Weight table of the output-layer";
parameter Integer numInputs "Specification of the inputs of the layer";
parameter Integer numNeurons "Number of neurons in the layer";
Modelica.Blocks.Interfaces.RealInput u[numInputs] "Connector of Real input signals" annotation(
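With bias and weights moved into Dense, the partial interface only carries the topology and the connectors. The hunk is truncated at this point, so the following is a plausible reconstruction of the remaining declarations rather than the exact file contents:

// Plausible post-commit shape of the interface (reconstructed; the actual
// file may differ in annotations and icon graphics).
partial model Layer
  // Topology of the neural network
  parameter Integer numInputs "Specification of the inputs of the layer";
  parameter Integer numNeurons "Number of neurons in the layer";
  Modelica.Blocks.Interfaces.RealInput u[numInputs] "Connector of Real input signals";
  Modelica.Blocks.Interfaces.RealOutput y[numNeurons] "Connector of Real output signals";
end Layer;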
