Skip to content

Commit 8703ca9

Browse files
committed
Cleanup
1 parent 5cd16f8 commit 8703ca9

File tree

3 files changed

+2
-13
lines changed

3 files changed

+2
-13
lines changed

brumby/src/harville.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -358,7 +358,7 @@ mod tests {
358358
}
359359

360360
#[test]
361-
fn harville_summary_3x2_condensed_without_scratchings() {
361+
fn harville_summary_2x3_condensed_without_scratchings() {
362362
const WIN_PROBS: [f64; 3] = [0.6, 0.3, 0.1];
363363
const RANKS: usize = 2;
364364
let probs = Matrix::from(

brumby/src/market.rs

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,6 @@ impl Market {
133133
let overround = prices.invert().sum::<f64>() / fair_sum;
134134
let est_rtp = 1.0 / overround;
135135
let initial_k = 1.0 + f64::ln(est_rtp) / f64::ln(prices.len() as f64);
136-
// println!("fit_power: initial_k: {initial_k}");
137136
let outcome = opt::univariate_descent(
138137
&UnivariateDescentConfig {
139138
init_value: initial_k,
@@ -152,7 +151,6 @@ impl Market {
152151
(sum - 1.0).powi(2)
153152
},
154153
);
155-
// println!("fit_power: outcome: {outcome:?}");
156154

157155
let probs = prices
158156
.iter()
@@ -187,8 +185,6 @@ impl Market {
187185
let mut sum = 0.0;
188186
for &price in &prices {
189187
let uncapped_scaled_price = 1.0 + (price - 1.0) / d;
190-
// let capped_scaled_price =
191-
// cap(uncapped_scaled_price, MIN_PRICE, MAX_PRICE);
192188
sum += 1.0 / uncapped_scaled_price;
193189
}
194190

brumby/src/opt.rs

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -49,25 +49,18 @@ pub fn univariate_descent(
4949
}
5050

5151
let (mut value, mut step) = (config.init_value, config.init_step);
52-
// println!("initial value: {value}, residual: {residual}, step: {step}");
5352
let (mut optimal_value, mut optimal_residual) = (value, residual);
54-
// let mut boost = 1.0;
55-
// let mut gradient: f64 = 1.0;
5653
while steps < config.max_steps {
5754
steps += 1;
58-
let new_value = value + step;/* * boost*/ // * f64::min(gradient.abs(), 100.0);
55+
let new_value = value + step;
5956
let new_residual = loss_f(new_value);
60-
// let gradient = (new_residual - residual) / (new_value - value);
61-
// println!("iterations: {iterations}, value: {value}, residual: {residual}, step: {step}, new_value: {new_value}, new_residual: {new_residual}");
6257

6358
if new_residual > residual {
6459
step = -step * 0.5;
6560
if step.abs() < config.min_step {
6661
break;
6762
}
6863
} else if new_residual < optimal_residual {
69-
// boost = f64::min(gradient.abs(), 10.0);
70-
// println!("optimal_residual: {optimal_residual}, new_residual: {new_residual}, boost: {boost}, diff: {}", optimal_residual - new_residual);
7164
optimal_residual = new_residual;
7265
optimal_value = new_value;
7366

0 commit comments

Comments (0)