4 changes: 2 additions & 2 deletions SLIDE/Layer.cpp
@@ -86,7 +86,7 @@ Layer::Layer(size_t noOfNodes, int previousLayerNumOfNodes, int layerID, NodeTyp
 {
 _Nodes[i].Update(previousLayerNumOfNodes, i, _layerID, type, batchsize, _weights+previousLayerNumOfNodes*i,
 _bias[i], _adamAvgMom+previousLayerNumOfNodes*i , _adamAvgVel+previousLayerNumOfNodes*i, _train_array);
-addtoHashTable(_Nodes[i]._weights, previousLayerNumOfNodes, _Nodes[i]._bias, i);
+addtoHashTable(_Nodes[i]._weights, previousLayerNumOfNodes, *_Nodes[i]._bias, i);
 }
 auto t2 = std::chrono::high_resolution_clock::now();
 auto timeDiffInMiliseconds = std::chrono::duration_cast<std::chrono::microseconds>(t2 - t1).count();
@@ -418,7 +418,7 @@ int Layer::queryActiveNodeandComputeActivations(int** activenodesperlayer, float
 for (size_t s = 0; s < _noOfNodes; s++) {
 float tmp = innerproduct(activenodesperlayer[layerIndex], activeValuesperlayer[layerIndex],
 lengths[layerIndex], _Nodes[s]._weights);
-tmp += _Nodes[s]._bias;
+tmp += *_Nodes[s]._bias;
 if (find(label, label + labelsize, s) != label + labelsize) {
 sortW.push_back(make_pair(-1000000000, s));
 what++;
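With `Node::_bias` changed to a `float*` (see the Node.h diff below), these read sites must dereference it: `tmp += _Nodes[s]._bias;` would no longer compile, because a `float*` cannot be added to a `float`, and the old `addtoHashTable` call would pass a pointer where a value is expected. A minimal sketch of the adjusted call site, with an assumed `addtoHashTable` signature (bias taken by value), not the actual SLIDE declaration:

```cpp
// Hypothetical stand-in for Layer::addtoHashTable; the real SLIDE signature may
// differ, but the bias argument is consumed as a plain float value.
static void addtoHashTable(float* /*weights*/, int /*length*/, float bias, int /*id*/) {
    (void)bias;  // hashing logic elided in this sketch
}

static void registerNode(float* weights, int length, float* node_bias, int id) {
    // Dereference: pass the bias value the pointer refers to, not the pointer itself.
    addtoHashTable(weights, length, *node_bias, id);
}
```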
4 changes: 2 additions & 2 deletions SLIDE/Network.cpp
@@ -245,13 +245,13 @@ int Network::ProcessInput(int **inputIndices, float **inputValues, int *lengths,

 tmp->_adamAvgMombias = BETA1 * tmp->_adamAvgMombias + (1 - BETA1) * tmp->_tbias;
 tmp->_adamAvgVelbias = BETA2 * tmp->_adamAvgVelbias + (1 - BETA2) * tmp->_tbias * tmp->_tbias;
-tmp->_bias += ratio*tmplr * tmp->_adamAvgMombias / (sqrt(tmp->_adamAvgVelbias) + EPS);
+*tmp->_bias += ratio*tmplr * tmp->_adamAvgMombias / (sqrt(tmp->_adamAvgVelbias) + EPS);
 tmp->_tbias = 0;
 }
 else
 {
 std::copy(tmp->_mirrorWeights, tmp->_mirrorWeights+(tmp->_dim) , tmp->_weights);
-tmp->_bias = tmp->_mirrorbias;
+*tmp->_bias = tmp->_mirrorbias;
 }
 if (tmpRehash) {
 int *hashes;
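Because `_bias` now aliases an entry in the layer's bias array (it is wired up in `Layer::Layer` via `_bias[i]`), the Adam step above updates that array in place rather than a per-node copy. A minimal, self-contained sketch of the write-through behaviour (values and names are illustrative only):

```cpp
#include <cstdio>

int main() {
    float layer_bias[3] = {0.1f, 0.2f, 0.3f};  // layer-owned bias buffer
    float* node_bias = &layer_bias[1];         // a node aliases one entry

    *node_bias += 0.05f;                       // e.g. an Adam bias step writes through the pointer

    std::printf("%f\n", layer_bias[1]);        // prints 0.250000: layer buffer and node stay in sync
    return 0;
}
```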
12 changes: 6 additions & 6 deletions SLIDE/Node.cpp
@@ -30,12 +30,12 @@ Node::Node(int dim, int nodeID, int layerID, NodeType type, int batchsize, float
 _activeInputs = 0;

 _weights = weights;
-_bias = bias;
-_mirrorbias = _bias;
+//_bias = bias;
+//_mirrorbias = _bias;

 }

-void Node::Update(int dim, int nodeID, int layerID, NodeType type, int batchsize, float *weights, float bias, float *adamAvgMom, float *adamAvgVel, train* train_blob)
+void Node::Update(int dim, int nodeID, int layerID, NodeType type, int batchsize, float *weights, float &bias, float *adamAvgMom, float *adamAvgVel, train* train_blob)
 {
 _dim = dim;
 _IDinLayer = nodeID;
@@ -55,8 +55,8 @@ void Node::Update(int dim, int nodeID, int layerID, NodeType type, int batchsize
 _activeInputs = 0;

 _weights = weights;
-_bias = bias;
-_mirrorbias = _bias;
+_bias = &bias;
+_mirrorbias = *_bias;

 }

@@ -100,7 +100,7 @@ float Node::getActivation(int* indices, float* values, int length, int inputID)
 {
 _train[inputID]._lastActivations += _weights[indices[i]] * values[i];
 }
-_train[inputID]._lastActivations += _bias;
+_train[inputID]._lastActivations += (*_bias);

 switch (_type)
 {
4 changes: 2 additions & 2 deletions SLIDE/Node.h
@@ -81,15 +81,15 @@ class Node
 float* _adamAvgVel;
 float* _t; //for adam
 int* _update;
-float _bias =0;
+float *_bias = NULL;
 float _tbias = 0;
 float _adamAvgMombias=0;
 float _adamAvgVelbias=0;
 float _mirrorbias =0;

 Node(){};
 Node(int dim, int nodeID, int layerID, NodeType type, int batchsize, float *weights, float bias, float *adamAvgMom, float *adamAvgVel);
-void Update(int dim, int nodeID, int layerID, NodeType type, int batchsize, float *weights, float bias, float *adamAvgMom, float *adamAvgVel, train* train_blob);
+void Update(int dim, int nodeID, int layerID, NodeType type, int batchsize, float *weights, float &bias, float *adamAvgMom, float *adamAvgVel, train* train_blob);
 void updateWeights(float* newWeights, float newbias);
 float getLastActivation(int inputID);
 void incrementDelta(int inputID, float incrementValue);
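Taken together, the pattern these changes introduce is that the layer owns the contiguous bias array and each node keeps only a pointer into it, wired up through the `float &bias` parameter of `Update`. A simplified sketch of that ownership pattern, using illustrative stand-in types rather than the actual SLIDE classes:

```cpp
#include <cstddef>
#include <vector>

// Illustrative stand-ins for the SLIDE classes; member and parameter names are assumptions.
struct NodeSketch {
    float* bias = nullptr;              // counterpart of Node::_bias, now a float*
    float  mirrorbias = 0;              // counterpart of Node::_mirrorbias

    void update(float& b) {             // mirrors Node::Update taking float &bias
        bias = &b;                      // store the address, as in "_bias = &bias;"
        mirrorbias = *bias;             // snapshot the current value, as in "_mirrorbias = *_bias;"
    }

    float activation(float dot) const { return dot + *bias; }
};

struct LayerSketch {
    std::vector<float>      bias;       // single source of truth for all biases
    std::vector<NodeSketch> nodes;

    explicit LayerSketch(std::size_t n) : bias(n, 0.0f), nodes(n) {
        for (std::size_t i = 0; i < n; ++i)
            nodes[i].update(bias[i]);   // each node points into the layer's buffer
        // The pointers stay valid as long as the bias storage is never reallocated.
    }
};
```

With this wiring, a bias update made through either the node or the layer is visible to both, which is what the dereferences in Layer.cpp and Network.cpp rely on.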