Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
otb
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Container Registry
Model registry
Operate
Environments
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Main Repositories
otb
Commits
ef7227bd
Commit
ef7227bd
authored
7 years ago
by
Cédric Traizet
Browse files
Options
Downloads
Patches
Plain Diff
the learning curve file can now be set in the app
parent
94ab0bf1
No related branches found
Branches containing commit
No related tags found
Tags containing commit
1 merge request
!4
Dimensionality reduction algorithms
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
include/AutoencoderModel.h
+7
-0
7 additions, 0 deletions
include/AutoencoderModel.h
include/AutoencoderModel.txx
+24
-5
24 additions, 5 deletions
include/AutoencoderModel.txx
include/cbTrainAutoencoder.txx
+13
-0
13 additions, 0 deletions
include/cbTrainAutoencoder.txx
with
44 additions
and
5 deletions
include/AutoencoderModel.h
+
7
−
0
View file @
ef7227bd
...
...
@@ -59,6 +59,12 @@ public:
/** Sparsity regularization strength (one value per layer). */
itkGetMacro(Beta, itk::Array<double>);
itkSetMacro(Beta, itk::Array<double>);

/** Whether the training error is dumped to a learning-curve file. */
itkGetMacro(WriteLearningCurve, bool);
itkSetMacro(WriteLearningCurve, bool);

/** Path of the text file receiving the learning curve. */
itkGetMacro(LearningCurveFileName, std::string);
itkSetMacro(LearningCurveFileName, std::string);

bool CanReadFile(const std::string & filename);
bool CanWriteFile(const std::string & filename);
...
...
@@ -98,6 +104,7 @@ private:
itk::Array<double> m_Beta;           // Sparsity regularization parameter
bool m_WriteLearningCurve;           // Flag for writing the learning curve into a txt file
std::string m_LearningCurveFileName; // Name of the output learning curve printed after training
};
}
// end namespace otb
...
...
This diff is collapsed.
Click to expand it.
include/AutoencoderModel.txx
+
24
−
5
View file @
ef7227bd
...
...
@@ -25,6 +25,7 @@ template <class TInputValue, class AutoencoderType>
AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel()
{
  // No learning-curve file is produced unless explicitly requested.
  this->m_WriteLearningCurve = false;
  // Batched prediction for this model is safe to run multi-threaded.
  this->m_IsDoPredictBatchMultiThreaded = true;
}
...
...
@@ -36,6 +37,7 @@ AutoencoderModel<TInputValue,AutoencoderType>::~AutoencoderModel()
template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::Train()
{
// NOTE(review): removed leftover debug print of m_WriteLearningCurve.
std::vector<shark::RealVector> features;
std::cout << "converting the input ListSample to Shark vector" << std::endl;
Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
...
...
@@ -43,9 +45,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
std::ofstream ofs;
// Bug fix: the original condition used '=' (assignment) instead of '==',
// which forced m_WriteLearningCurve to true and opened the file
// unconditionally. Test the flag instead. (Dead commented-out code with a
// hard-coded user path removed.)
if (this->m_WriteLearningCurve)
  {
  ofs.open(m_LearningCurveFileName);
  ofs << "learning curve" << std::endl;
  }
if (m_Epsilon > 0){
shark::TrainingProgress<> criterion(5,m_Epsilon);
...
...
@@ -106,15 +115,19 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::Abstrac
optimizer.init(error);
std::cout<<"error before training : " << optimizer.solution().value<<std::endl;
// Bug fix: '=' assigned true instead of comparing, so the marker was always
// written; the stray unconditional duplicate write above it is dropped too.
if (this->m_WriteLearningCurve)
  {
  File << "end layer" << std::endl;
  }
unsigned int i=0;
do{
  i++;
  optimizer.step(error);
  // Bug fix: '==' semantics restored — the original '=true' assignment made
  // the condition always true and overwrote the flag every iteration.
  if (this->m_WriteLearningCurve)
    {
    File << optimizer.solution().value << std::endl;
    }
} while( !criterion.stop( optimizer.solution() ) );
std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
...
...
@@ -149,10 +162,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::A
do{
  i++;
  optimizer.step(error);
  // Bug fix: condition previously used '=' (assignment), forcing the branch
  // and clobbering m_WriteLearningCurve; test the flag instead.
  if (this->m_WriteLearningCurve)
    {
    File << optimizer.solution().value << std::endl;
    }
} while( !criterion.stop( optimizer.solution() ) );
std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
// Same '=' vs '==' fix for the end-of-layer marker.
if (this->m_WriteLearningCurve)
  {
  File << "end layer" << std::endl;
  }
net.setParameterVector(optimizer.solution().point);
m_net.push_back(net);
samples = net.encode(samples);
...
...
This diff is collapsed.
Click to expand it.
include/cbTrainAutoencoder.txx
+
13
−
0
View file @
ef7227bd
...
...
@@ -80,6 +80,11 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
// Per-layer sparsity regularization strength of the autoencoder.
AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength");
SetParameterDescription("model.autoencoder.beta",
"Sparsity regularization strength");
// Optional output text file receiving the training error per iteration.
AddParameter(ParameterType_OutputFilename, "model.autoencoder.learningcurve", "Learning curve");
SetParameterDescription("model.autoencoder.learningcurve", "Learning error values");
// The learning curve is opt-in; training works without it.
MandatoryOff("model.autoencoder.learningcurve");
}
...
...
@@ -146,6 +151,14 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
dimredTrainer->SetNoise(noise);
dimredTrainer->SetRho(rho);
dimredTrainer->SetBeta(beta);
// Enable learning-curve output only when the optional parameter was supplied.
// NOTE(review): leftover debug prints ("yo", "before train") removed.
if (HasValue("model.autoencoder.learningcurve") && IsParameterEnabled("model.autoencoder.learningcurve"))
  {
  dimredTrainer->SetWriteLearningCurve(true);
  dimredTrainer->SetLearningCurveFileName(GetParameterString("model.autoencoder.learningcurve"));
  }
dimredTrainer->SetInputListSample(trainingListSample);
dimredTrainer->Train();
...
...
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment