Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
…into activation_functions
  • Loading branch information
yonatankarni committed Jan 8, 2023
2 parents 4426515 + 1d2c2f5 commit 3ef411e
Show file tree
Hide file tree
Showing 6 changed files with 32 additions and 43 deletions.
37 changes: 5 additions & 32 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,31 +11,22 @@ edition = "2018"
csv = "1.1.3"
# we need new version to enable static builds
fasthash = "0.4"
# fasthash = { git = "https://github.com/flier/rust-fasthash", rev="6ca68b93f7" }
# fasthash = { git = "https://github.com/flier/rust-fasthash", rev="ef0c52b4157af9a1a7d19b2a37658b6c26a6bea6" }
serde = {version = "1.0.114" , features = ["derive"]}
serde_json = "1.0.55"
#fastapprox = "0.3.0"
clap = "2.33.1"
byteorder = "1.3.4"
#backtrace = "0.3.46"
#triomphe = "0.1.1"
merand48 = "0.1.0"
daemonize = "0.4.1"
lz4 = "1.23.2"
nom = "7"
dyn-clone = "1.0"
#funty="=1.1.0" # no need for pinning any more
rand = "0.8.5"
rand_distr = "0.4.3"
#rand_core = "0.4.2"
rand_xoshiro = "0.6.0"
# We'll use cloudflare's zlib as it is the fastest game in town
#flate2 = "1.0" #minz library
#flate2 = { version = "1.0", features = ["zlib"], default-features = false }
flate2 = { version = "1.0", features = ["cloudflare_zlib"], default-features = false }
shellwords = "1.1.0"
blas = "0.22"
#jemallocator = "0.5.0"
intel-mkl-src = {version= "0.7.0", default-features = false, features=["download", "mkl-static-lp64-seq"]}
libm = "0.2.6"
[build-dependencies]
Expand All @@ -44,34 +35,16 @@ cbindgen = "0.23.0"
[lib]
crate_type = ["cdylib"]
doctest = false
#blas = "0.22"
#intel-mkl-src = {version= "0.7.0", default-features = false, features=["download", "mkl-static-lp64-seq"]}
#blas = "0.22"
#intel-mkl-src = {version= "0.7.0", default-features = false, features=["download", "mkl-static-lp64-seq"]}
#blas-src = { version = "0.8", features = ["intel-mkl"] }
#openblas-src = {version = "0.10.4", features = ["static"]}
#cblas = "0.4.0"
#intel-mkl-src = {version= "0.7.0", default-features = false, features=["download", "mkl-static-lp64-seq"]}
#blas-src = "0.8.0"
#blas = "0.22"
#openblas-src = {version = "0.10.4", features=["static"]}

#blas = "0.22"
#openblas-src = {version = "0.10.4" }

#blas-src = { version = "0.8", features = ["intel-mkl"] }

#rust-blas="0.2.0"

[dev-dependencies]
tempfile = "3.1.0"
mockstream = "0.0.3"

[profile.release]
debug = true
#lto = 'fat'
#panic = 'abort'
#codegen-units=1
debug = false
lto = false
panic = 'abort'
codegen-units=1

[profile.dev]
opt-level = 2
Expand Down
13 changes: 13 additions & 0 deletions build.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/bin/bash

# -----------------------------------------------------------------------------
# Build entrypoint for producing the release binary.
#
# By default no extra rustc flags are applied (a generic release build).
# To experiment with codegen flags, export RUSTFLAGS before invoking cargo,
# as shown in the commented examples at the bottom of this file.
# -----------------------------------------------------------------------------

cargo build --release

# Examples of target-specific builds via RUSTFLAGS:
#RUSTFLAGS="-Ctarget-cpu=skylake" cargo build --release;
#RUSTFLAGS="-Ctarget-cpu=cascadelake" cargo build --release;
2 changes: 2 additions & 0 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
#![allow(unused_mut)]
#![allow(non_snake_case)]
#![allow(redundant_semicolons)]
//#[global_allocator]
//static GLOBAL: jemallocator::Jemalloc = jemallocator::Jemalloc;

use flate2::read::MultiGzDecoder;
use std::collections::VecDeque;
Expand Down
6 changes: 3 additions & 3 deletions src/model_instance.rs
Original file line number Diff line number Diff line change
Expand Up @@ -318,12 +318,12 @@ impl ModelInstance {

if let Some(val) = cl.value_of("ffm_bit_precision") {
mi.ffm_bit_precision = val.parse()?;
println!("FFM num weight bits = {}", mi.ffm_bit_precision); // vwcompat
// println!("FFM num weight bits = {}", mi.ffm_bit_precision); // vwcompat
}

if let Some(val) = cl.value_of("bit_precision") {
mi.bit_precision = val.parse()?;
println!("Num weight bits = {}", mi.bit_precision); // vwcompat
// println!("Num weight bits = {}", mi.bit_precision); // vwcompat
}

mi.learning_rate = parse_float("learning_rate", mi.learning_rate, &cl);
Expand Down Expand Up @@ -407,7 +407,7 @@ impl ModelInstance {
/*! A method that enables updating hyperparameters of an existing (pre-loaded) model.
Currently limited to the most commonly used hyperparameters: ffm_learning_rate, ffm_power_t, power_t, learning_rate. */

println!("Replacing initial regressor's hyperparameters from the command line ..");
// println!("Replacing initial regressor's hyperparameters from the command line ..");
let mut replacement_hyperparam_ids: Vec<(String, String)> = vec![];

// Handle learning rates
Expand Down
9 changes: 5 additions & 4 deletions src/multithread_helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,11 @@ impl <T:Sized>Drop for UnsafelySharableTrait<T> {
if count == 0 {
let box_to_be_dropped = ManuallyDrop::take(&mut self.content);
// Now this means that the content will be dropped
println!("Dropping BoxedRegressorTrait!");
} else {
println!("Not dropping BoxedRegressorTrait as there are still {} references!", count);
// println!("Dropping BoxedRegressorTrait!");
}
//else {
// println!("Not dropping BoxedRegressorTrait as there are still {} references!", count);
//}
}

}
Expand Down Expand Up @@ -81,7 +82,7 @@ impl BoxedRegressorTrait {
content: ManuallyDrop::new(r2),
reference_count: self.reference_count.clone()
};
println!("References to object: {}", Arc::<Mutex<PhantomData<u32>>>::strong_count(&ret.reference_count));
// println!("References to object: {}", Arc::<Mutex<PhantomData<u32>>>::strong_count(&ret.reference_count));
ret
}
}
Expand Down
8 changes: 4 additions & 4 deletions src/regressor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -190,8 +190,8 @@ impl Regressor {
_ => Err(format!("unknown nn initialization type: \"{}\"", init_type_str)).unwrap()
};
let neuron_type = block_neural::NeuronType::WeightedSum;
println!("Neuron layer: width: {}, neuron type: {:?}, dropout: {}, maxnorm: {}, init_type: {:?}",
width, neuron_type, dropout, maxnorm, init_type);
// println!("Neuron layer: width: {}, neuron type: {:?}, dropout: {}, maxnorm: {}, init_type: {:?}",
// width, neuron_type, dropout, maxnorm, init_type);
output = block_neural::new_neuronlayer_block(&mut bg,
&mi,
output,
Expand All @@ -206,7 +206,7 @@ impl Regressor {

if layernorm == NNLayerNorm::BeforeActivation {
output = block_normalize::new_normalize_layer_block(&mut bg, &mi, output).unwrap();
println!("Normalize layer before relu");
// println!("Normalize layer before relu");
}

match activation {
Expand All @@ -231,7 +231,7 @@ impl Regressor {

if layernorm == NNLayerNorm::AfterActivation {
output = block_normalize::new_normalize_layer_block(&mut bg, &mi, output).unwrap();
println!("Normalize layer after relu");
// println!("Normalize layer after relu");
}


Expand Down

0 comments on commit 3ef411e

Please sign in to comment.