entropy working

This commit is contained in:
Priec
2026-03-07 17:36:48 +01:00
commit 009e5c4925
6 changed files with 3025 additions and 0 deletions

1
hod_1/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
target/

2864
hod_1/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

10
hod_1/Cargo.toml Normal file
View File

@@ -0,0 +1,10 @@
[package]
name = "hod_1"
version = "0.1.0"
edition = "2024"
[dependencies]
burn = { version = "0.20.1", default-features = false, features = ["ndarray"] }
burn-ndarray = "0.20.1"
clap = { version = "4.5.60", features = ["derive"] }
ndarray = "0.17.2"

56
hod_1/src/main.rs Normal file
View File

@@ -0,0 +1,56 @@
use burn::tensor::{backend::Backend, Tensor, TensorData};
use burn_ndarray::NdArray;
use ndarray::{array, Array1};
type B = NdArray<f64>;
fn main() {
    let device = <B as Backend>::Device::default();
    // The probability vector starts life as an ndarray::Array1<f64>...
    let probs: Array1<f64> = array![0.4, 0.6];
    // ...and is bridged into Burn by copying the data into a rank-1 tensor
    // on the default ndarray-backend device.
    let len = probs.len();
    let data = TensorData::new(probs.to_vec(), [len]);
    let tensor = Tensor::<B, 1>::from_data(data, &device);
    let h = entropy(tensor);
    println!("{h}");
}
/// Shannon entropy H(p) = -Σ p_i · ln(p_i) of a rank-1 probability tensor,
/// returned as a scalar. Uses the natural logarithm (nats).
///
/// Zero probabilities are handled safely: 0 · ln(0) is defined as 0.
/// NOTE(review): assumes `p` is a valid distribution (non-negative entries);
/// negative entries would make `log` produce NaN — confirm at call sites.
fn entropy(p: Tensor<B, 1>) -> f64 {
    // Replace zeros with 1.0 before taking the log: ln(1) = 0, so those
    // positions contribute exactly 0.0 * 0.0 = 0.0 to the sum. No second
    // masking pass is needed (the original re-masked the products to 0.0,
    // which was redundant).
    let zero_mask = p.clone().equal_elem(0.0);
    let p_safe = p.clone().mask_fill(zero_mask, 1.0);
    // `p` is moved here (last use) — no clone required.
    (-(p * p_safe.log())).sum().into_scalar()
}
// pub fn entropy2(p: Tensor<B, 1>) -> f64 {
// if p == 0.0 {
// return 0.0;
// }
// - p * p.ln()
// }
// pub fn cross_entropy(p: Tensor<B, 1>, q: f64) -> f64 {
// if p == 0.0 {
// return 0.0;
// }
// - p * q.ln()
// }
// pub fn kl_div(p: f64, q: f64) -> f64 {
// if p == 0.0 {
// return 0.0;
// }
// p*(p.ln()-q.ln())
// }
// pub fn kl_div2(p: f64, q: f64) -> f64 {
// cross_entropy(p,q) - entropy(p)
// }