Skip to content

Commit 4796d49

Browse files
committed
Layer
1 parent 67b75e3 commit 4796d49

File tree

2 files changed

+126
-1
lines changed

2 files changed

+126
-1
lines changed

src/engine.zig

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
//! This file provides the autograd engine functionality for micrograd
1+
//! This file provides the autograd engine functionality for kiwigrad
22

33
const std = @import("std");
44

@@ -338,5 +338,24 @@ pub fn Value(comptime T: type) type {
338338
items[i].backprop();
339339
}
340340
}
341+
342+
/// Write the computational graph to a Graphviz DOT file and print
/// follow-up instructions to `writer`.
///
/// `name` is the base name: the file `<name>.dot` is created in the
/// current working directory. Failures inside `draw_dot` are logged to
/// stderr and swallowed; allocation, file-creation, and `writer` errors
/// propagate to the caller.
///
/// BUG FIX: the function was declared to return `void` while using `try`,
/// which cannot compile — the return type must be an error union (`!void`).
pub fn draw_graph(graph: *Self, name: []const u8, writer: anytype) !void {
    // NOTE(review): page_allocator is used directly for these short-lived
    // name buffers because the graph stores no allocator — confirm this
    // matches the project's allocator conventions.
    const dot_name = try std.fmt.allocPrint(std.heap.page_allocator, "{s}.dot", .{name});
    defer std.heap.page_allocator.free(dot_name);
    const png_name = try std.fmt.allocPrint(std.heap.page_allocator, "{s}.png", .{name});
    defer std.heap.page_allocator.free(png_name);

    const file = try std.fs.cwd().createFile(dot_name, .{});
    defer file.close();
    const file_writer = file.writer();
    // Best-effort: a failed DOT dump is reported but does not abort the caller.
    graph.draw_dot(file_writer, std.heap.page_allocator) catch |err| {
        std.debug.print("Failed to write dot file: {}\n", .{err});
        return;
    };

    try writer.print("Computational graph written to {s}\n", .{dot_name});
    try writer.print("You can visualize it by running: dot -Tpng {s} -o {s}\n", .{ dot_name, png_name });
}
341360
};
342361
}

src/nn.zig

Lines changed: 106 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,43 @@ pub fn Neuron(comptime T: type) type {
6161
return env.rNormal(@as(T, -1), @as(T, 1)) catch @as(T, 0);
6262
}
6363

64+
/// Get all parameters (weights and bias) for optimization.
///
/// Returns an arena-owned slice of length `nin + 1` — the caller must not
/// free it; it lives until the arena is deinitialized.
pub fn parameters(self: *Self) []*ValueType {
    // BUG FIX: `var params` is never reassigned (only its elements are
    // written through the slice), which modern Zig rejects — use `const`.
    // +1 slot reserved for the bias appended after the weights.
    const params = arena.allocator().alloc(*ValueType, self.nin + 1) catch unreachable;

    // Copy the weight pointers in order.
    for (self.weights, 0..) |weight, i| {
        params[i] = weight;
    }

    // The bias goes in the last slot.
    params[self.nin] = self.bias;
    return params;
}
77+
78+
/// Apply one step of plain gradient descent: every weight and the bias is
/// moved against its gradient, scaled by `learning_rate`.
pub fn update_parameters(self: *Self, learning_rate: T) void {
    for (self.weights) |w| w.data -= learning_rate * w.grad;
    self.bias.data -= learning_rate * self.bias.grad;
}
85+
86+
/// Total count of trainable parameters: one weight per input, plus one bias.
pub fn num_parameters(self: *Self) usize {
    const bias_count: usize = 1;
    return self.nin + bias_count;
}
90+
91+
/// Print neuron information (input count, weights, bias) to stderr.
pub fn print(self: *Self) void {
    std.debug.print("Neuron({} inputs)\n", .{self.nin});
    // BUG FIX: std.debug.print always requires an args tuple, even when the
    // format string has no placeholders — the original omitted it and
    // failed to compile.
    std.debug.print(" Weights: ", .{});
    for (self.weights, 0..) |weight, i| {
        std.debug.print("w{}={any} ", .{ i, weight.data });
    }
    std.debug.print("\n Bias: b={any}\n", .{self.bias.data});
}
100+
64101
/// Forward pass through the neuron
65102
pub fn forward(self: *Self, inputs: []*ValueType) *ValueType {
66103
if (inputs.len != self.nin) {
@@ -74,5 +111,74 @@ pub fn Neuron(comptime T: type) type {
74111
// Apply activation function (ReLU)
75112
return sum.relu();
76113
}
114+
115+
/// Reset every parameter's gradient to zero, ready for the next backward pass.
pub fn zero_grad(self: *Self) void {
    const zero = @as(T, 0);
    for (self.weights) |w| {
        w.grad = zero;
    }
    self.bias.grad = zero;
}
122+
};
123+
}
124+
125+
/// Represents a layer of neurons with a configurable input size
///
/// This is a generic type that can be used to create a layer of neurons with configurable input size.
///
/// # Example
/// ```zig
/// const Layer = @import("nn.zig").Layer;
/// const layer = Layer(f32).new(3, 2);
/// const output = try layer.forward(&inputs);
/// ```
pub fn Layer(comptime T: type) type {
    const ValueType = engine.Value(T);
    const NeuronType = Neuron(T);
    return struct {
        const Self = @This();

        /// The number of inputs to the layer
        nin: usize,
        /// The number of neurons in the layer
        nout: usize,
        /// The neurons in the layer
        neurons: []*NeuronType,

        // Type-level arena shared by all Layer(T) instances;
        // init() must run before new() is called.
        var arena: std.heap.ArenaAllocator = undefined;

        /// Initialize the shared arena. Call once before creating layers.
        pub fn init(alloc: std.mem.Allocator) !void {
            arena = std.heap.ArenaAllocator.init(alloc);
        }

        /// Free all layers and neurons allocated through the arena at once.
        pub fn deinit() void {
            arena.deinit();
        }

        /// Create a layer of `nout` neurons, each taking `nin` inputs.
        /// The returned pointer is arena-owned; do not free it directly.
        pub fn new(nin: usize, nout: usize) *Self {
            const layer = arena.allocator().create(Self) catch unreachable;
            const neurons = arena.allocator().alloc(*NeuronType, nout) catch unreachable;

            for (neurons) |*neuron| {
                neuron.* = NeuronType.new(nin);
            }

            layer.* = Self{
                .neurons = neurons[0..],
                // BUG FIX: `.nin` was missing from the initializer; the
                // field has no default value, so the original failed to
                // compile (and would otherwise leave `nin` undefined).
                .nin = nin,
                .nout = nout,
            };

            return layer;
        }

        /// Forward pass through the layer.
        /// Returns an arena-owned slice of `nout` outputs, one per neuron;
        /// the caller must not free it.
        pub fn forward(self: *Self, inputs: []*ValueType) []*ValueType {
            // BUG FIX: the original `defer arena.allocator().free(list)`
            // released the slice on return, handing the caller freed
            // memory. The result must outlive this call, so no defer.
            const outputs = arena.allocator().alloc(*ValueType, self.nout) catch unreachable;
            for (self.neurons, 0..) |neuron, i| {
                outputs[i] = neuron.forward(inputs);
            }
            return outputs;
        }
    };
}

0 commit comments

Comments
 (0)