diff --git a/Unity & C#/P.4 Batches, Layers, and Objects.cs b/Unity & C#/P.4 Batches, Layers, and Objects.cs
new file mode 100644
index 0000000..4e0bf9f
--- /dev/null
+++ b/Unity & C#/P.4 Batches, Layers, and Objects.cs
@@ -0,0 +1,114 @@
+using System.Collections.Generic;
+using UnityEngine;
+using Random = UnityEngine.Random;
+
+public class Script4 : MonoBehaviour
+{
+    // Declare Variables
+    public float[][] X;
+    public List<string> finalOutput;
+
+    // Create Class
+    public class LayerDense
+    {
+        // Declare Class Specific Variables
+        float[][] weights = null;
+        float[] biases = null;
+        public float[][] output = null;
+        public List<float[]> batchResults = new List<float[]>();
+        public List<float> results = new List<float>();
+        public List<string> outputsPrinted = new List<string>();
+
+        // Class Constructor. Wasn't able to figure out how to convert the np.random.randn, so I just
+        // created something that gives results consistent with what they are looking for in the video.
+        public LayerDense(int n_inputs, int n_neurons)
+        {
+            // Initialize the weights to random numbers from -0.3 to 0.3.
+            weights = new float[n_inputs][];
+            for (int i = 0; i < n_inputs; i++)
+            {
+                weights[i] = new float[n_neurons];
+                for (int j = 0; j < n_neurons; j++)
+                {
+                    weights[i][j] = Random.Range(-0.3f, 0.3f);
+                }
+            }
+
+            // Initialize the biases to zero.
+            biases = new float[n_neurons];
+            for (int i = 0; i < n_neurons; i++)
+            {
+                biases[i] = 0f;
+            }
+        }
+
+        // Forward pass: dot product of each input row with the weights, plus the biases.
+        public void Forward(float[][] inputs)
+        {
+            batchResults.Clear();
+            foreach (var input in inputs)
+            {
+                results.Clear();
+                for (int j = 0; j < weights[0].Length; j++)
+                {
+                    float sum = 0f;
+                    for (int i = 0; i < weights.Length; i++)
+                    {
+                        sum += input[i] * weights[i][j];
+                    }
+                    results.Add(sum + biases[j]);
+                }
+                batchResults.Add(results.ToArray());
+            }
+            output = batchResults.ToArray();
+        }
+
+        // Format each output row as a printable string like "[x, y, z]".
+        public List<string> PostResults()
+        {
+            foreach (var line in output)
+            {
+                string outputString = "[";
+                for (int n = 0; n < line.Length; n++)
+                {
+                    outputString += line[n].ToString() + ", ";
+                }
+                outputString = outputString.TrimEnd(' ');
+                outputString = outputString.TrimEnd(',');
+                outputString += "]";
+                outputsPrinted.Add(outputString);
+            }
+            return outputsPrinted;
+        }
+    }
+
+    // Start is called before the first frame update
+    void Start()
+    {
+        // Set input values
+        X = new float[][]
+        {
+            new float[] { 1f, 2f, 3f, 2.5f },
+            new float[] { 2.0f, 5.0f, -1.0f, 2.0f },
+            new float[] { -1.5f, 2.7f, 3.3f, -0.8f }
+        };
+
+        // Start with the same initial random seed for the same results each time.
+        Random.InitState(0);
+
+        // Create layers, pass inputs through layers, and get the final output.
+        LayerDense layer1 = new LayerDense(4, 5);
+        LayerDense layer2 = new LayerDense(5, 2);
+        layer1.Forward(X);
+        layer2.Forward(layer1.output);
+        finalOutput = layer2.PostResults();
+    }
+}
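A note on the constructor comment in the diff: np.random.randn draws from a standard normal distribution, whereas Random.Range(-0.3f, 0.3f) is uniform. If a closer analogue is wanted, one possible sketch is a Box-Muller transform over Unity's uniform random values; the RandN helper below is hypothetical and not part of the committed file.

    // Hypothetical helper (not in the diff above): approximates np.random.randn by
    // turning two uniform samples into one standard-normal sample (Box-Muller transform).
    static float RandN()
    {
        float u1 = Random.Range(1e-7f, 1f);   // keep u1 > 0 so Mathf.Log(u1) is defined
        float u2 = Random.Range(0f, 1f);
        return Mathf.Sqrt(-2f * Mathf.Log(u1)) * Mathf.Cos(2f * Mathf.PI * u2);
    }

    // Possible use inside the LayerDense constructor, scaled down by a small factor
    // as the video does with np.random.randn:
    // weights[i][j] = 0.10f * RandN();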