Commit a79f0d47 authored by Cédric Traizet

Sparse autoencoder added; parameters can now be defined for each layer (noise, regularization, rho and beta)
parent 00fb23c7
Merge request !4: Dimensionality reduction algorithms
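With this change, each autoencoder training parameter is read as a string list holding one value per hidden layer. For a hypothetical two-layer stack one might pass -model.autoencoder.nbneuron 100 50 -model.autoencoder.noise 0.1 0 -model.autoencoder.regularization 0.01 0.01 -model.autoencoder.rho 0.1 0.1 -model.autoencoder.beta 1 1; the parameter keys appear in the diff below, while the values and the layer count are purely illustrative.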
......@@ -29,6 +29,7 @@
#include "DimensionalityReductionModelFactory.h"
#include "DimensionalityReductionModel.h"
#include <time.h>
namespace otb
{
namespace Wrapper
......@@ -115,7 +116,7 @@ class CbDimensionalityReductionVector : public Application
SetDocExampleParameterValue("featout", "perimeter area width");
//SetOfficialDocLink();
}
//
void DoUpdateParameters() ITK_OVERRIDE
{
......@@ -271,7 +272,7 @@ class CbDimensionalityReductionVector : public Application
otb::ogr::Layer outLayer = output->GetLayer(0);
OGRErr errStart = outLayer.ogr().StartTransaction();
/*
if (errStart != OGRERR_NONE)
{
itkExceptionMacro(<< "Unable to start transaction for OGR layer " << outLayer.ogr().GetName() << ".");
......@@ -279,25 +280,28 @@ class CbDimensionalityReductionVector : public Application
// Add the prediction fields to the output layer if they do not already exist
OGRFeatureDefn &layerDefn = layer.GetLayerDefn();
int idx = layerDefn.GetFieldIndex(GetParameterStringList("featout").c_str());
if (idx >= 0)
for (std::size_t i=0; i<GetParameterStringList("featout").size(); i++)
{
if (layerDefn.GetFieldDefn(idx)->GetType() != OFTInteger)
itkExceptionMacro("Field name "<< GetParameterStringList("featout") << " already exists with a different type!");
}
else
{
OGRFieldDefn predictedField(GetParameterStringList("featout").c_str(), OFTInteger);
ogr::FieldDefn predictedFieldDef(predictedField);
outLayer.CreateField(predictedFieldDef);
OGRFeatureDefn &layerDefn = outLayer.GetLayerDefn();
int idx = layerDefn.GetFieldIndex(GetParameterStringList("featout")[i].c_str());
if (idx >= 0)
{
if (layerDefn.GetFieldDefn(idx)->GetType() != OFTReal)
itkExceptionMacro("Field name "<< GetParameterStringList("featout")[i] << " already exists with a different type!");
}
else
{
OGRFieldDefn predictedField(GetParameterStringList("featout")[i].c_str(), OFTReal);
ogr::FieldDefn predictedFieldDef(predictedField);
outLayer.CreateField(predictedFieldDef);
}
}
// Fill output layer
unsigned int count=0;
std::string classfieldname = GetParameterStringList("featout");
auto classfieldname = GetParameterStringList("featout");
it = layer.cbegin();
itEnd = layer.cend();
for( ; it!=itEnd ; ++it, ++count)
......@@ -305,8 +309,9 @@ class CbDimensionalityReductionVector : public Application
ogr::Feature dstFeature(outLayer.GetLayerDefn());
dstFeature.SetFrom( *it , TRUE);
dstFeature.SetFID(it->GetFID());
dstFeature[classfieldname].SetValue<int>(target->GetMeasurementVector(count)[0]);
for (std::size_t i=0; i<classfieldname.size(); ++i){
dstFeature[classfieldname[i]].SetValue<ValueType>(target->GetMeasurementVector(count)[i]);
}
if (updateMode)
{
outLayer.SetFeature(dstFeature);
......@@ -316,6 +321,7 @@ class CbDimensionalityReductionVector : public Application
outLayer.CreateFeature(dstFeature);
}
}
if(outLayer.ogr().TestCapability("Transactions"))
{
const OGRErr errCommitX = outLayer.ogr().CommitTransaction();
......@@ -326,7 +332,7 @@ class CbDimensionalityReductionVector : public Application
}
output->SyncToDisk();
clock_t toc = clock();
otbAppLogINFO( "Elapsed: "<< ((double)(toc - tic) / CLOCKS_PER_SEC)<<" seconds.");*/
otbAppLogINFO( "Elapsed: "<< ((double)(toc - tic) / CLOCKS_PER_SEC)<<" seconds.");
}
ModelPointerType m_Model;
......
......@@ -34,17 +34,17 @@ public:
itkGetMacro(NumberOfIterations,unsigned int);
itkSetMacro(NumberOfIterations,unsigned int);
itkGetMacro(Regularization,double);
itkSetMacro(Regularization,double);
itkGetMacro(Regularization,itk::Array<double>);
itkSetMacro(Regularization,itk::Array<double>);
itkGetMacro(Noise,double);
itkSetMacro(Noise,double);
itkGetMacro(Noise,itk::Array<double>);
itkSetMacro(Noise,itk::Array<double>);
itkGetMacro(rho,double);
itkSetMacro(rho,double);
itkGetMacro(Rho,itk::Array<double>);
itkSetMacro(Rho,itk::Array<double>);
itkGetMacro(beta,double);
itkSetMacro(beta,double);
itkGetMacro(Beta,itk::Array<double>);
itkSetMacro(Beta,itk::Array<double>);
bool CanReadFile(const std::string & filename);
bool CanWriteFile(const std::string & filename);
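A side note on the accessor changes above: ITK's get/set macros synthesize the accessor names from the argument, which is also why the commit capitalizes rho and beta (itkGetMacro(rho, double) would generate Getrho() around m_rho, while Rho yields the conventional GetRho()/SetRho() over m_Rho). Roughly, the expansion for the new array-valued parameter looks as follows (a simplified sketch; the real ITK macros also emit debug logging and their details vary across ITK versions):

// Approximate expansion of itkSetMacro(Rho, itk::Array<double>) and
// itkGetMacro(Rho, itk::Array<double>).
virtual void SetRho(const itk::Array<double> _arg)
{
  if (this->m_Rho != _arg)
    {
    this->m_Rho = _arg;
    this->Modified(); // mark the object as changed for the ITK pipeline
    }
}
virtual itk::Array<double> GetRho()
{
  return this->m_Rho;
}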
......@@ -53,7 +53,8 @@ public:
void Load(const std::string & filename, const std::string & name="") ITK_OVERRIDE;
void Train() ITK_OVERRIDE;
void TrainOneLayer(unsigned int, shark::Data<shark::RealVector> &);
void TrainOneLayer(unsigned int,double, double, shark::Data<shark::RealVector> &);
void TrainOneSparseLayer(unsigned int,double, double,double, shark::Data<shark::RealVector> &);
protected:
AutoencoderModel();
......@@ -71,10 +72,10 @@ private:
itk::Array<unsigned int> m_NumberOfHiddenNeurons;
/** Training parameters */
unsigned int m_NumberOfIterations;
double m_Regularization; // L2 Regularization parameter
double m_Noise; // probability for an input to be set to 0 (denoising autoencoder)
double m_rho; // Sparsity parameter
double m_beta; // Sparsity regularization parameter
itk::Array<double> m_Regularization; // L2 Regularization parameter
itk::Array<double> m_Noise; // probability for an input to be set to 0 (denoising autoencoder)
itk::Array<double> m_Rho; // Sparsity parameter
itk::Array<double> m_Beta; // Sparsity regularization parameter
};
} // end namespace otb
......
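With the members now array-valued, a caller supplies one entry per hidden layer through the generated setters. A minimal configuration sketch, assuming a Shark 3.x autoencoder type and a float input type (both are assumptions for illustration; only the setter names come from the header above):

#include <itkArray.h>
#include <shark/Models/Autoencoder.h> // Shark 3.x header path (assumption)
#include "AutoencoderModel.h"         // hypothetical include name

// Hypothetical instantiation; the concrete AutoencoderType may differ.
using NetType   = shark::Autoencoder<shark::LogisticNeuron, shark::LogisticNeuron>;
using ModelType = otb::AutoencoderModel<float, NetType>;

void ConfigureTwoLayerStack(ModelType::Pointer model)
{
  itk::Array<unsigned int> neurons(2);
  neurons[0] = 100; neurons[1] = 50; // hidden layer sizes

  itk::Array<double> noise(2), reg(2), rho(2), beta(2);
  noise[0] = 0.1;  // layer 1: denoising path (noise != 0)
  noise[1] = 0.0;  // layer 2: sparse path (noise == 0)
  reg.Fill(0.01);  // L2 weight for every layer
  rho.Fill(0.1);   // target mean activation for sparse layers
  beta.Fill(1.0);  // weight of the sparsity penalty

  model->SetNumberOfHiddenNeurons(neurons);
  model->SetNumberOfIterations(100);
  model->SetNoise(noise);
  model->SetRegularization(reg);
  model->SetRho(rho);
  model->SetBeta(beta);
}

Train() then dispatches each layer to the denoising or the sparse path depending on its noise entry, as the diff below shows.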
......@@ -20,7 +20,6 @@
namespace otb
{
template <class TInputValue, class AutoencoderType>
AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel()
{
......@@ -42,31 +41,34 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
for (unsigned int i = 0 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
{
TrainOneLayer( m_NumberOfHiddenNeurons[i], inputSamples);
if (m_Noise[i] != 0) // Shark doesn't allow training a layer with both a sparsity term AND a noisy input (shark::SparseAutoencoderError takes the autoencoder itself rather than a general model, so the noise model cannot be chained in front of it)
{
TrainOneLayer( m_NumberOfHiddenNeurons[i],m_Noise[i],m_Regularization[i], inputSamples);
}
else
{
TrainOneSparseLayer( m_NumberOfHiddenNeurons[i],m_Rho[i],m_Beta[i],m_Regularization[i], inputSamples);
}
}
}
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(unsigned int nbneuron, shark::Data<shark::RealVector> &samples)
void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(unsigned int nbneuron,double noise_strength,double regularization, shark::Data<shark::RealVector> &samples)
{
AutoencoderType net;
/*std::vector<shark::RealVector> features;
Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
*/ //in Train() now
std::size_t inputs = dataDimension(samples);
net.setStructure(inputs, nbneuron);
initRandomUniform(net,-0.1*std::sqrt(1.0/inputs),0.1*std::sqrt(1.0/inputs));
shark::ImpulseNoiseModel noise(m_Noise,0.0); //set an input pixel with probability m_Noise to 0
shark::ImpulseNoiseModel noise(noise_strength,0.0); // set an input pixel to 0 with probability noise_strength
shark::ConcatenatedModel<shark::RealVector,shark::RealVector> model = noise>> net;
shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
shark::SquaredLoss<shark::RealVector> loss;
shark::ErrorFunction error(trainSet, &model, &loss);
//shark::SparseAutoencoderError error(data,&model, &loss, m_rho, m_beta);
//shark::SparseAutoencoderError error(trainSet,&model, &loss, m_Rho, m_Beta);
//shark::SparseAutoencoderError error(trainSet,&net, &loss, 0.1, 0.1);
shark::TwoNormRegularizer regularizer(error.numberOfVariables());
error.setRegularizer(m_Regularization,&regularizer);
error.setRegularizer(regularization,&regularizer);
shark::IRpropPlusFull optimizer;
error.init();
......@@ -82,6 +84,35 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(unsigned int n
}
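For reference, the objective minimized by TrainOneLayer above is the squared reconstruction error of the noise-corrupted input plus the two-norm regularizer. In our notation (not Shark's), with n(.) the impulse-noise corruption, f_theta the autoencoder and lambda the regularization weight:

E(\theta) = \sum_{x \in \mathcal{D}} \lVert x - f_\theta(n(x)) \rVert^2 + \lambda \lVert \theta \rVert_2^2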
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(unsigned int nbneuron,double rho,double beta, double regularization, shark::Data<shark::RealVector> &samples)
{
AutoencoderType net;
std::size_t inputs = dataDimension(samples);
net.setStructure(inputs, nbneuron);
initRandomUniform(net,-0.1*std::sqrt(1.0/inputs),0.1*std::sqrt(1.0/inputs));
shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
shark::SquaredLoss<shark::RealVector> loss;
shark::SparseAutoencoderError error(trainSet,&net, &loss, rho, beta);
shark::TwoNormRegularizer regularizer(error.numberOfVariables());
error.setRegularizer(regularization,&regularizer);
shark::IRpropPlusFull optimizer;
error.init();
optimizer.init(error);
std::cout<<"Optimizing model: "+net.name()<<std::endl;
for(std::size_t i = 0; i != m_NumberOfIterations; ++i){
optimizer.step(error);
std::cout<<i<<" "<<optimizer.solution().value<<std::endl;
}
net.setParameterVector(optimizer.solution().point);
m_net.push_back(net);
samples = net.encode(samples);
}
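TrainOneSparseLayer delegates the sparsity term to shark::SparseAutoencoderError. In the standard sparse-autoencoder formulation (sketched here in our notation, not necessarily Shark's exact implementation), rho is the target mean activation, \hat{\rho}_j the empirical mean activation of hidden unit j, and beta the penalty weight:

E(\theta) = \sum_{x \in \mathcal{D}} \lVert x - f_\theta(x) \rVert^2 + \beta \sum_j \mathrm{KL}(\rho \,\Vert\, \hat{\rho}_j) + \lambda \lVert \theta \rVert_2^2

Note that after each layer is optimized, samples = net.encode(samples) replaces the inputs by their codes, so the next layer in Train() is trained greedily on the previous layer's representation.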
template <class TInputValue, class AutoencoderType>
bool AutoencoderModel<TInputValue,AutoencoderType>::CanReadFile(const std::string & filename)
{
......
......@@ -55,16 +55,24 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
"The number of neurons in each hidden layer.");
//Regularization
AddParameter(ParameterType_Float, "model.autoencoder.regularization", "Strength of the regularization");
SetParameterFloat("model.autoencoder.regularization",0, false);
AddParameter(ParameterType_StringList, "model.autoencoder.regularization", "Strength of the regularization");
SetParameterDescription("model.autoencoder.regularization",
"Strength of the L2 regularization used during training");
//Noise strength
AddParameter(ParameterType_Float, "model.autoencoder.noise", "Strength of the noise");
SetParameterFloat("model.autoencoder.noise",0, false);
AddParameter(ParameterType_StringList, "model.autoencoder.noise", "Strength of the noise");
SetParameterDescription("model.autoencoder.noise",
"Strength of the noise");
// Sparsity parameter
AddParameter(ParameterType_StringList, "model.autoencoder.rho", "Sparsity parameter");
SetParameterDescription("model.autoencoder.rho",
"Sparsity parameter");
// Sparsity regularization strength
AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength");
SetParameterDescription("model.autoencoder.beta",
"Sparsity regularization strength");
}
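The new string-list parameters could also be exercised in the application's documentation example through SetDocExampleParameterValue, which this code base already uses. A hypothetical two-layer setting (the values are illustrative; only the keys come from the diff above):

// Hypothetical doc-example values: one entry per hidden layer.
SetDocExampleParameterValue("model.autoencoder.nbneuron", "100 50");
SetDocExampleParameterValue("model.autoencoder.noise", "0.1 0");
SetDocExampleParameterValue("model.autoencoder.regularization", "0.01 0.01");
SetDocExampleParameterValue("model.autoencoder.rho", "0.1 0.1");
SetDocExampleParameterValue("model.autoencoder.beta", "1 1");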
......@@ -102,16 +110,34 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
{
typename autoencoderchoice::Pointer dimredTrainer = autoencoderchoice::New();
itk::Array<unsigned int> nb_neuron;
std::vector<std::basic_string<char>> s= GetParameterStringList("model.autoencoder.nbneuron");
nb_neuron.SetSize(s.size());
for (int i=0; i<s.size(); i++){ // This will be templated later (the 3)
nb_neuron[i]=std::stoi(s[i]);
itk::Array<double> noise;
itk::Array<double> regularization;
itk::Array<double> rho;
itk::Array<double> beta;
std::vector<std::string> s_nbneuron = GetParameterStringList("model.autoencoder.nbneuron");
std::vector<std::string> s_noise = GetParameterStringList("model.autoencoder.noise");
std::vector<std::string> s_regularization = GetParameterStringList("model.autoencoder.regularization");
std::vector<std::string> s_rho = GetParameterStringList("model.autoencoder.rho");
std::vector<std::string> s_beta = GetParameterStringList("model.autoencoder.beta");
nb_neuron.SetSize(s_nbneuron.size());
noise.SetSize(s_nbneuron.size());
regularization.SetSize(s_nbneuron.size());
rho.SetSize(s_nbneuron.size());
beta.SetSize(s_nbneuron.size());
for (std::size_t i=0; i<s_nbneuron.size(); i++){
nb_neuron[i]=std::stoi(s_nbneuron[i]);
noise[i]=std::stod(s_noise[i]);
regularization[i]=std::stod(s_regularization[i]);
rho[i]=std::stod(s_rho[i]);
beta[i]=std::stod(s_beta[i]);
}
std::cout << nb_neuron << std::endl;
dimredTrainer->SetNumberOfHiddenNeurons(nb_neuron);
dimredTrainer->SetNumberOfIterations(GetParameterInt("model.autoencoder.nbiter"));
dimredTrainer->SetRegularization(GetParameterFloat("model.autoencoder.regularization"));
dimredTrainer->SetNoise(GetParameterFloat("model.autoencoder.noise"));
dimredTrainer->SetRegularization(regularization);
dimredTrainer->SetNoise(noise);
dimredTrainer->SetRho(rho);
dimredTrainer->SetBeta(beta);
dimredTrainer->SetInputListSample(trainingListSample);
std::cout << "before train" << std::endl;
dimredTrainer->Train();
......
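One caveat in the parsing loop above: every list is indexed up to s_nbneuron.size(), so a shorter noise, regularization, rho or beta list would read out of range. A guard along these lines could precede the loop (a sketch; otbAppLogFATAL is the OTB application macro for fatal errors, and the variable names match the diff above):

// Sketch: every per-layer list must have one entry per hidden layer.
const std::size_t nbLayers = s_nbneuron.size();
if (s_noise.size() != nbLayers || s_regularization.size() != nbLayers ||
    s_rho.size() != nbLayers || s_beta.size() != nbLayers)
  {
  otbAppLogFATAL("model.autoencoder parameter lists must all contain "
                 << nbLayers << " values, one per hidden layer.");
  }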