@@ -61,6 +61,43 @@ pub fn Neuron(comptime T: type) type {
6161 return env .rNormal (@as (T , -1 ), @as (T , 1 )) catch @as (T , 0 );
6262 }
6363
/// Get all parameters (weights and bias) for optimization.
/// Returns a slice of length `nin + 1`: the weight pointers followed by
/// the bias pointer. The slice is allocated from the type-level arena and
/// stays valid until the arena is deinitialized; do not free it directly.
pub fn parameters(self: *Self) []*ValueType {
    // Allocation failure aborts, matching the `catch unreachable`
    // convention used by the other arena allocations in this file.
    // `const` (not `var`): the slice binding itself is never reassigned.
    const params = arena.allocator().alloc(*ValueType, self.nin + 1) catch unreachable;

    // Copy the weight pointers, then append the bias as the final entry.
    @memcpy(params[0..self.nin], self.weights);
    params[self.nin] = self.bias;
    return params;
}
77+
/// Update parameters using gradient descent.
/// Takes one step of size `learning_rate` opposite each parameter's
/// stored gradient; gradients themselves are left untouched.
pub fn update_parameters(self: *Self, learning_rate: T) void {
    // Bias first, then every weight — the updates are independent.
    self.bias.data -= learning_rate * self.bias.grad;
    for (self.weights) |w| {
        w.data -= learning_rate * w.grad;
    }
}
85+
/// Get the number of parameters.
/// One weight per input plus a single bias term.
pub fn num_parameters(self: *Self) usize {
    const weight_count = self.nin;
    const bias_count = 1;
    return weight_count + bias_count;
}
90+
/// Print neuron information (input count, weights, bias) to stderr.
/// Intended for debugging only; performs no allocation.
pub fn print(self: *Self) void {
    std.debug.print("Neuron({} inputs)\n", .{self.nin});
    // BUG FIX: std.debug.print requires a format string AND an args
    // tuple; the original call passed only the string and did not compile.
    std.debug.print(" Weights: ", .{});
    for (self.weights, 0..) |weight, i| {
        std.debug.print("w{}={any} ", .{ i, weight.data });
    }
    std.debug.print("\n Bias: b={any}\n", .{self.bias.data});
}
100+
64101 /// Forward pass through the neuron
65102 pub fn forward (self : * Self , inputs : []* ValueType ) * ValueType {
66103 if (inputs .len != self .nin ) {
@@ -74,5 +111,74 @@ pub fn Neuron(comptime T: type) type {
74111 // Apply activation function (ReLU)
75112 return sum .relu ();
76113 }
114+
/// Zero gradients for all parameters.
/// Resets the gradient of the bias and of every weight to zero;
/// typically called before each backward pass.
pub fn zero_grad(self: *Self) void {
    const zero = @as(T, 0);
    self.bias.grad = zero;
    for (self.weights) |w| {
        w.grad = zero;
    }
}
122+ };
123+ }
124+
/// Represents a layer of neurons with a configurable input size
///
/// This is a generic type that can be used to create a layer of neurons
/// with a configurable input size and neuron count.
///
/// # Example
/// ```zig
/// const Layer = @import("nn.zig").Layer;
/// const layer = Layer(f32).new(3, 2);
/// const output = layer.forward(&inputs);
/// ```
pub fn Layer(comptime T: type) type {
    const ValueType = engine.Value(T);
    const NeuronType = Neuron(T);
    return struct {
        const Self = @This();

        /// The number of inputs to the layer
        nin: usize,
        /// The number of neurons in the layer
        nout: usize,
        /// The neurons in the layer
        neurons: []*NeuronType,

        // Type-level arena backing every allocation made by this layer type.
        var arena: std.heap.ArenaAllocator = undefined;

        /// Initialize the type-level arena; must be called before `new`.
        pub fn init(alloc: std.mem.Allocator) !void {
            arena = std.heap.ArenaAllocator.init(alloc);
        }

        /// Release every allocation made through the arena.
        pub fn deinit() void {
            arena.deinit();
        }

        /// Create a layer of `nout` neurons, each taking `nin` inputs.
        /// The layer and its neurons live in the arena; they are freed
        /// all at once by `deinit`.
        pub fn new(nin: usize, nout: usize) *Self {
            const layer = arena.allocator().create(Self) catch unreachable;
            const neurons = arena.allocator().alloc(*NeuronType, nout) catch unreachable;

            for (neurons) |*neuron| {
                neuron.* = NeuronType.new(nin);
            }

            layer.* = Self{
                // BUG FIX: `.nin` was missing from the struct literal.
                // The field has no default value, so the original did
                // not compile.
                .nin = nin,
                .nout = nout,
                .neurons = neurons,
            };

            return layer;
        }

        /// Forward pass through the layer.
        /// Returns one output value per neuron. The returned slice is
        /// arena-owned and stays valid until `deinit` is called.
        pub fn forward(self: *Self, inputs: []*ValueType) []*ValueType {
            // BUG FIX: the original wrote `defer arena.allocator().free(list)`
            // and then returned `list`, handing the caller freed memory.
            // The slice must outlive this call, so it is simply left to
            // the arena to reclaim.
            const outputs = arena.allocator().alloc(*ValueType, self.nout) catch unreachable;
            for (self.neurons, 0..) |neuron, i| {
                outputs[i] = neuron.forward(inputs);
            }
            return outputs;
        }
    };
}
0 commit comments