Commit b2fc854c authored by Cédric Traizet

Started refactoring of ae (paused)

parent af9aeea9
@@ -6,9 +6,11 @@
#include <fstream>
#include <shark/Algorithms/StoppingCriteria/AbstractStoppingCriterion.h>
#include <shark/Models/FFNet.h>
#include <shark/Models/Autoencoder.h>
namespace otb
{
template <class TInputValue, class AutoencoderType>
template <class TInputValue, class NeuronType>
class ITK_EXPORT AutoencoderModel: public MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>>
{
@@ -33,7 +35,12 @@ public:
typedef typename Superclass::ConfidenceSampleType ConfidenceSampleType;
typedef typename Superclass::ConfidenceListSampleType ConfidenceListSampleType;
/// Neural network related typedefs
typedef shark::Autoencoder<NeuronType,shark::LinearNeuron> OutAutoencoderType;
typedef shark::Autoencoder<NeuronType,NeuronType> AutoencoderType;
typedef shark::FFNet<NeuronType,shark::LinearNeuron> NetworkType;
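// OutAutoencoderType: single-layer autoencoder with a linear reconstruction layer,
// AutoencoderType: single-layer autoencoder using the same neuron type on both layers,
// NetworkType: the full feed-forward network assembled from the individually trained layers.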
itkNewMacro(Self);
itkTypeMacro(AutoencoderModel, DimensionalityReductionModel);
@@ -77,10 +84,10 @@ public:
void Train() ITK_OVERRIDE;
template <class T>
void TrainOneLayer(shark::AbstractStoppingCriterion<T> & criterion, unsigned int,double, double, shark::Data<shark::RealVector> &, std::ostream&);
void TrainOneLayer(shark::AbstractStoppingCriterion<T> & criterion,unsigned int, unsigned int,double, double, shark::Data<shark::RealVector> &, std::ostream&);
template <class T>
void TrainOneSparseLayer(shark::AbstractStoppingCriterion<T> & criterion, unsigned int,double, double,double, shark::Data<shark::RealVector> &, std::ostream&);
void TrainOneSparseLayer(shark::AbstractStoppingCriterion<T> & criterion,unsigned int, unsigned int,double, double,double, shark::Data<shark::RealVector> &, std::ostream&);
protected:
AutoencoderModel();
@@ -93,8 +100,8 @@ protected:
private:
/** Network attributes */
std::vector<AutoencoderType> m_net;
//std::vector<AutoencoderType> m_net;
NetworkType m_net;
itk::Array<unsigned int> m_NumberOfHiddenNeurons;
@@ -21,40 +21,35 @@
namespace otb
{
template <class TInputValue, class AutoencoderType>
AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel()
template <class TInputValue, class NeuronType>
AutoencoderModel<TInputValue,NeuronType>::AutoencoderModel()
{
this->m_IsDoPredictBatchMultiThreaded = true;
this->m_WriteLearningCurve = false;
}
template <class TInputValue, class AutoencoderType>
AutoencoderModel<TInputValue,AutoencoderType>::~AutoencoderModel()
template <class TInputValue, class NeuronType>
AutoencoderModel<TInputValue,NeuronType>::~AutoencoderModel()
{
}
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::Train()
template <class TInputValue, class NeuronType>
void AutoencoderModel<TInputValue,NeuronType>::Train()
{
std::cout << this->m_WriteLearningCurve << std::endl;
std::vector<shark::RealVector> features;
std::cout << "converting the input ListSample to Shark vector" << std::endl;
Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
std::cout << "creating the data vector" << std::endl;
shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
std::ofstream ofs;
if (this->m_WriteLearningCurve == true)
{
ofs.open(m_LearningCurveFileName);
ofs << "learning curve" << std::endl;
}
/*
std::ofstream ofs("/mnt/data/home/traizetc/computation/learning_curve.txt"); //learning curve
ofs << "learning curve" << std::endl;
*/
if (m_Epsilon > 0){
shark::TrainingProgress<> criterion(5,m_Epsilon);
@@ -62,11 +57,11 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
{
if (m_Noise[i] != 0) // Shark does not allow training a layer with both a sparsity term and a noisy input (shark::SparseAutoencoderError takes an autoencoder)
{
TrainOneLayer(criterion, m_NumberOfHiddenNeurons[i],m_Noise[i],m_Regularization[i], inputSamples,ofs);
TrainOneLayer(criterion,i , m_NumberOfHiddenNeurons[i],m_Noise[i],m_Regularization[i], inputSamples,ofs);
}
else
{
TrainOneSparseLayer( criterion,m_NumberOfHiddenNeurons[i],m_Rho[i],m_Beta[i],m_Regularization[i],inputSamples, ofs);
TrainOneSparseLayer( criterion,i , m_NumberOfHiddenNeurons[i],m_Rho[i],m_Beta[i],m_Regularization[i],inputSamples, ofs);
}
criterion.reset();
}
@@ -80,11 +75,11 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
{
if (m_Noise[i] != 0) // Shark does not allow training a layer with both a sparsity term and a noisy input (shark::SparseAutoencoderError takes an autoencoder)
{
TrainOneLayer(criterion, m_NumberOfHiddenNeurons[i],m_Noise[i],m_Regularization[i], inputSamples, ofs);
TrainOneLayer(criterion,i, m_NumberOfHiddenNeurons[i],m_Noise[i],m_Regularization[i], inputSamples, ofs);
}
else
{
TrainOneSparseLayer(criterion, m_NumberOfHiddenNeurons[i],m_Rho[i],m_Beta[i],m_Regularization[i], inputSamples, ofs);
TrainOneSparseLayer(criterion,i, m_NumberOfHiddenNeurons[i],m_Rho[i],m_Beta[i],m_Regularization[i], inputSamples, ofs);
}
criterion.reset();
}
@@ -92,9 +87,9 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
}
}
template <class TInputValue, class AutoencoderType>
template <class TInputValue, class NeuronType>
template <class T>
void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::AbstractStoppingCriterion<T> & criterion, unsigned int nbneuron,double noise_strength,double regularization, shark::Data<shark::RealVector> &samples, std::ostream& File)
void AutoencoderModel<TInputValue,NeuronType>::TrainOneLayer(shark::AbstractStoppingCriterion<T> & criterion,unsigned int layer_index, unsigned int nbneuron,double noise_strength,double regularization, shark::Data<shark::RealVector> &samples, std::ostream& File)
{
AutoencoderType net;
@@ -133,14 +128,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::Abstrac
std::cout << "error after " << i << " iterations: " << optimizer.solution().value << std::endl;
net.setParameterVector(optimizer.solution().point);
m_net.push_back(net);
// m_net.push_back(net);
m_net.setLayer(layer_index,net.encoderMatrix(),net.hiddenBias());
m_net.setLayer( m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index,net.decoderMatrix(),net.outputBias());
samples = net.encode(samples);
}
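The index arithmetic used by the two setLayer calls above (and repeated in TrainOneSparseLayer below) mirrors each greedily trained layer into the stacked network: the encoder goes to slot layer_index and the decoder to the symmetric slot counted from the end. A minimal stand-alone sketch of that mapping, assuming a hypothetical count of three hidden layers (plain C++, no Shark calls):

```cpp
#include <cstddef>
#include <iostream>

int main()
{
  // Hypothetical stand-in for m_NumberOfHiddenNeurons.Size()
  const std::size_t nbHiddenLayers = 3;
  for (std::size_t layer_index = 0; layer_index < nbHiddenLayers; ++layer_index)
  {
    // Encoder of layer i fills slot i; its decoder fills the mirrored slot 2*N - 1 - i
    const std::size_t decoder_index = 2 * nbHiddenLayers - 1 - layer_index;
    std::cout << "encoder -> layer " << layer_index
              << ", decoder -> layer " << decoder_index << std::endl;
  }
  return 0;
}
```

For three hidden layers this prints the pairs (0,5), (1,4) and (2,3), i.e. the decoder stack is the encoder stack in reverse order.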
template <class TInputValue, class AutoencoderType>
template <class TInputValue, class NeuronType>
template <class T>
void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::AbstractStoppingCriterion<T> & criterion, unsigned int nbneuron,double rho,double beta, double regularization, shark::Data<shark::RealVector> &samples, std::ostream& File)
void AutoencoderModel<TInputValue,NeuronType>::TrainOneSparseLayer(shark::AbstractStoppingCriterion<T> & criterion,unsigned int layer_index, unsigned int nbneuron,double rho,double beta, double regularization, shark::Data<shark::RealVector> &samples, std::ostream& File)
{
AutoencoderType net;
@@ -174,17 +171,19 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::A
File << "end layer" << std::endl;
}
net.setParameterVector(optimizer.solution().point);
m_net.push_back(net);
//m_net.push_back(net);
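// Mirror the trained sparse layer into the stacked network, using the same index scheme as in TrainOneLayer.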
m_net.setLayer(layer_index,net.encoderMatrix(),net.hiddenBias());
m_net.setLayer( m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index,net.decoderMatrix(),net.outputBias());
samples = net.encode(samples);
}
template <class TInputValue, class AutoencoderType>
bool AutoencoderModel<TInputValue,AutoencoderType>::CanReadFile(const std::string & filename)
template <class TInputValue, class NeuronType>
bool AutoencoderModel<TInputValue,NeuronType>::CanReadFile(const std::string & filename)
{
try
{
this->Load(filename);
m_net[0].name();
m_net.name();
}
catch(...)
{
@@ -194,27 +193,28 @@ bool AutoencoderModel<TInputValue,AutoencoderType>::CanReadFile(const std::strin
}
template <class TInputValue, class AutoencoderType>
bool AutoencoderModel<TInputValue,AutoencoderType>::CanWriteFile(const std::string & filename)
template <class TInputValue, class NeuronType>
bool AutoencoderModel<TInputValue,NeuronType>::CanWriteFile(const std::string & filename)
{
return true;
}
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::Save(const std::string & filename, const std::string & name)
template <class TInputValue, class NeuronType>
void AutoencoderModel<TInputValue,NeuronType>::Save(const std::string & filename, const std::string & name)
{
std::ofstream ofs(filename);
ofs << m_net[0].name() << std::endl; // the first line of the model file contains a key
ofs << m_net.name() << std::endl; // the first line of the model file contains a key
boost::archive::polymorphic_text_oarchive oa(ofs);
//m_net.write(oa);
oa << m_net;
ofs.close();
/*
if (this->m_WriteWeights == true) // output the map vectors in a txt file
{
std::ofstream otxt(filename+".txt");
for (unsigned int i = 0 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
{
otxt << m_net[i].encoderMatrix() << std::endl;
@@ -248,19 +248,22 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Save(const std::string & fil
}
*/
}
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::Load(const std::string & filename, const std::string & name)
template <class TInputValue, class NeuronType>
void AutoencoderModel<TInputValue,NeuronType>::Load(const std::string & filename, const std::string & name)
{
AutoencoderType net;
NetworkType net;
std::ifstream ifs(filename);
char autoencoder[256];
ifs.getline(autoencoder,256);
std::string autoencoderstr(autoencoder);
std::cout << autoencoderstr << std::endl;
if (autoencoderstr != net.name()){
itkExceptionMacro(<< "Error opening " << filename.c_str() );
}
@@ -274,13 +277,15 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Load(const std::string & fil
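// Note: the lines below still read the per-layer sizes through the former std::vector interface
// (m_net[i], m_net.size()) and have not yet been adapted to the single NetworkType member;
// the refactoring is paused at this point.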
m_NumberOfHiddenNeurons[i] = m_net[i].numberOfHiddenNeurons();
}
this->m_Dimension = m_NumberOfHiddenNeurons[m_net.size()-1];
}
template <class TInputValue, class AutoencoderType>
typename AutoencoderModel<TInputValue,AutoencoderType>::TargetSampleType
AutoencoderModel<TInputValue,AutoencoderType>::DoPredict(const InputSampleType & value, ConfidenceValueType * quality) const
template <class TInputValue, class NeuronType>
typename AutoencoderModel<TInputValue,NeuronType>::TargetSampleType
AutoencoderModel<TInputValue,NeuronType>::DoPredict(const InputSampleType & value, ConfidenceValueType * quality) const
{
/*
shark::RealVector samples(value.Size());
for(size_t i = 0; i < value.Size();i++)
{
@@ -302,14 +307,15 @@ AutoencoderModel<TInputValue,AutoencoderType>::DoPredict(const InputSampleType &
target[a]=data.element(0)[a];
}
return target;
*/
}
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>
template <class TInputValue, class NeuronType>
void AutoencoderModel<TInputValue,NeuronType>
::DoPredictBatch(const InputListSampleType *input, const unsigned int & startIndex, const unsigned int & size, TargetListSampleType * targets, ConfidenceListSampleType * quality) const
{
/*
std::vector<shark::RealVector> features;
Shark::ListSampleRangeToSharkVector(input, features,startIndex,size);
shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
@@ -329,6 +335,7 @@ void AutoencoderModel<TInputValue,AutoencoderType>
targets->SetMeasurementVector(id,target);
++id;
}
*/
}
} // namespace otb
@@ -10,7 +10,7 @@
namespace otb
{
template <class TInputValue, class TTargetValue, class AutoencoderType>
template <class TInputValue, class TTargetValue, class NeuronType>
class ITK_EXPORT AutoencoderModelFactoryBase : public itk::ObjectFactoryBase
{
public:
@@ -27,8 +27,8 @@
namespace otb
{
template <class TInputValue, class TOutputValue, class AutoencoderType>
AutoencoderModelFactoryBase<TInputValue,TOutputValue, AutoencoderType>::AutoencoderModelFactoryBase()
template <class TInputValue, class TOutputValue, class NeuronType>
AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::AutoencoderModelFactoryBase()
{
std::string classOverride = std::string("DimensionalityReductionModel");
@@ -39,22 +39,22 @@ AutoencoderModelFactoryBase<TInputValue,TOutputValue, AutoencoderType>::Autoenco
"Shark AE ML Model",
1,
// itk::CreateObjectFunction<AutoencoderModel<TInputValue,TOutputValue> >::New());
itk::CreateObjectFunction<AutoencoderModel<TInputValue,AutoencoderType > >::New());
itk::CreateObjectFunction<AutoencoderModel<TInputValue,NeuronType > >::New());
}
template <class TInputValue, class TOutputValue, class AutoencoderType>
AutoencoderModelFactoryBase<TInputValue,TOutputValue, AutoencoderType>::~AutoencoderModelFactoryBase()
template <class TInputValue, class TOutputValue, class NeuronType>
AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::~AutoencoderModelFactoryBase()
{
}
template <class TInputValue, class TOutputValue, class AutoencoderType>
const char* AutoencoderModelFactoryBase<TInputValue,TOutputValue, AutoencoderType>::GetITKSourceVersion(void) const
template <class TInputValue, class TOutputValue, class NeuronType>
const char* AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::GetITKSourceVersion(void) const
{
return ITK_SOURCE_VERSION;
}
template <class TInputValue, class TOutputValue, class AutoencoderType>
const char* AutoencoderModelFactoryBase<TInputValue,TOutputValue, AutoencoderType>::GetDescription() const
template <class TInputValue, class TOutputValue, class NeuronType>
const char* AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::GetDescription() const
{
return "Autoencoder model factory";
}
@@ -34,6 +34,7 @@
namespace otb
{
/*
template <class TInputValue, class TTargetValue>
// using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::Autoencoder<shark::TanhNeuron, shark::LinearNeuron>> ;
using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::Autoencoder<shark::TanhNeuron, shark::TanhNeuron>> ;
@@ -42,6 +43,10 @@ using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTarget
template <class TInputValue, class TTargetValue>
// using TiedAutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::TiedAutoencoder< shark::TanhNeuron, shark::LinearNeuron>> ;
using TiedAutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::TiedAutoencoder< shark::TanhNeuron, shark::TanhNeuron>> ;
*/
template <class TInputValue, class TTargetValue>
using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::TanhNeuron> ;
template <class TInputValue, class TTargetValue>
@@ -125,7 +130,7 @@ DimensionalityReductionModelFactory<TInputValue,TOutputValue>
#ifdef OTB_USE_SHARK
RegisterFactory(PCAModelFactory<TInputValue,TOutputValue>::New());
RegisterFactory(AutoencoderModelFactory<TInputValue,TOutputValue>::New());
RegisterFactory(TiedAutoencoderModelFactory<TInputValue,TOutputValue>::New());
// RegisterFactory(TiedAutoencoderModelFactory<TInputValue,TOutputValue>::New());
#endif
}
@@ -200,6 +205,7 @@ DimensionalityReductionModelFactory<TInputValue,TOutputValue>
continue;
}
/*
TiedAutoencoderModelFactory<TInputValue,TOutputValue> *taeFactory =
dynamic_cast<TiedAutoencoderModelFactory<TInputValue,TOutputValue> *>(*itFac);
if (taeFactory)
@@ -207,7 +213,7 @@ DimensionalityReductionModelFactory<TInputValue,TOutputValue>
itk::ObjectFactoryBase::UnRegisterFactory(taeFactory);
continue;
}
*/
// PCA
PCAModelFactory<TInputValue,TOutputValue> *pcaFactory =
dynamic_cast<PCAModelFactory<TInputValue,TOutputValue> *>(*itFac);
@@ -103,13 +103,13 @@ public:
#ifdef OTB_USE_SHARK
// typedef shark::Autoencoder< shark::TanhNeuron, shark::LinearNeuron> AutoencoderType;
typedef shark::Autoencoder< shark::TanhNeuron, shark::TanhNeuron> AutoencoderType;
typedef otb::AutoencoderModel<InputValueType, AutoencoderType> AutoencoderModelType;
typedef shark::TanhNeuron NeuronType;
typedef otb::AutoencoderModel<InputValueType, NeuronType> AutoencoderModelType;
/*
// typedef shark::TiedAutoencoder< shark::TanhNeuron, shark::LinearNeuron> TiedAutoencoderType;
typedef shark::TiedAutoencoder< shark::TanhNeuron, shark::TanhNeuron> TiedAutoencoderType;
typedef otb::AutoencoderModel<InputValueType, TiedAutoencoderType> TiedAutoencoderModelType;
*/
typedef otb::PCAModel<InputValueType> PCAModelType;
#endif
@@ -89,6 +89,7 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration.");
#endif
}
/*
if(modelName == "tiedautoencoder")
{
#ifdef OTB_USE_SHARK
@@ -97,7 +98,7 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration.");
#endif
}
*/
if(modelName == "pca")
{
#ifdef OTB_USE_SHARK
@@ -101,12 +101,12 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
{
TrainAutoencoder<AutoencoderModelType>(trainingListSample,modelPath);
}
/*
if(TiedWeigth == "yes")
{
TrainAutoencoder<TiedAutoencoderModelType>(trainingListSample,modelPath);
}
*/
if(TiedWeigth != "yes" && TiedWeigth != "no")
{
std::cerr << "istied : invalid choice <yes/no>" << std::endl;