Skip to content
Snippets Groups Projects
Commit df980730 authored by Guillaume Pasero's avatar Guillaume Pasero
Browse files

MRG: Synchronize with branch develop

Conflicts:
	.gitlab-ci.yml
	CI/ubuntu-18.04-llvm-shark.cmake
parents 9fed002d 35ee560b
No related branches found
No related tags found
No related merge requests found
......@@ -52,6 +52,11 @@ debian-build:
image: $BUILD_IMAGE_REGISTRY/otb-ubuntu-native:18.04
script:
- xvfb-run -a -n 1 -s "-screen 0 1024x768x24 -dpi 96" ctest -V -S CI/main_ci.cmake -DIMAGE_NAME:string=ubuntu-18.04-llvm
artifacts:
paths:
- build/CookBook-*-html.tar.gz
- build/Documentation/Cookbook/latex/CookBook-*.pdf
- build/Documentation/Doxygen/OTB-Doxygen-*.tar.bz2
# This is needed to have only one pipeline in a merge request context
ubuntu-llvm:
......
......@@ -26,6 +26,7 @@ import re
import unittest
import sys
import json
import time
trace = False
......@@ -168,7 +169,7 @@ class Handler:
if key == "project":
project = value
if ( site == "" or stamp == "" or name == "" or project == ""):
print( "Not enougth argument given for buildid request \
print( "Missing argument for buildid request \
site:"+site+", stamp:"+stamp+", name:"+name+", project:"+project+".")
return
buildid_api = "/api/v1/getbuildid.php?"
......@@ -176,11 +177,18 @@ site:"+site+", stamp:"+stamp+", name:"+name+", project:"+project+".")
full_url = self.url + buildid_api + buildid_params
if trace:
print("full_url: "+full_url)
response = urllib.request.urlopen(full_url).read().decode()
if trace:
print ( "response: " + response )
nb_try = 6
build_id_regex = re.compile( "<buildid>([0-9]+)</buildid>" )
buildid = build_id_regex.search( response )
while nb_try:
response = urllib.request.urlopen(full_url).read().decode()
if trace:
print ( "response: " + response )
buildid = build_id_regex.search( response )
nb_try -= 1
if buildid or (nb_try == 0):
break
print("No build id, retry ...")
time.sleep(60)
if buildid:
self.buildid = buildid.group(1)
if trace:
......@@ -198,7 +206,7 @@ site:"+site+", stamp:"+stamp+", name:"+name+", project:"+project+".")
if ( buildid == "" ):
buildid = self.buildid
if ( buildid == "" ):
print( "Not enougth argument given to build url")
print( "Missing argument to build url")
return
build_url = self.url
build_url +="/buildSummary.php?"
......@@ -212,7 +220,7 @@ site:"+site+", stamp:"+stamp+", name:"+name+", project:"+project+".")
if ( buildid == "" ):
buildid = self.buildid
if ( buildid == "" ):
print( "Not enougth argument given to build Status")
print( "Missing argument to build Status")
return
full_url = self.url + "/api/v1/buildSummary.php?buildid=" + buildid
response = urllib.request.urlopen(full_url).read().decode()
......@@ -251,10 +259,13 @@ if __name__ == "__main__":
handler.GetSite()
handler.GetName()
handler.GetStamp()
handler.GetBuildId()
# handler.buildid="1"
cdash_url = handler.GetBuildUrl()
( state , error ) = handler.GetBuildStatus()
if handler.GetBuildId() is None:
cdash_url = "https://cdash.orfeo-toolbox.org"
state = 'failed'
error = "Failed to get build id"
else:
cdash_url = handler.GetBuildUrl()
( state , error ) = handler.GetBuildStatus()
if trace:
print ( "cdash_url is: " + cdash_url )
gitlab_url = "https://gitlab.orfeo-toolbox.org/api/v4/projects/"
......
......@@ -45,6 +45,12 @@ elseif(ci_ref_name)
endif()
endif()
# set pipelines to enable documentation
set(ci_cookbook_profiles mr develop release)
set(ci_doxygen_profiles mr develop release)
list(FIND ci_cookbook_profiles ${ci_profile} ci_do_cookbook)
list(FIND ci_doxygen_profiles ${ci_profile} ci_do_doxygen)
# Warning: this variable is used in cdash_status.py. If you change it from
# ${IMAGE_NAME} to something else, do not forget to update cdash_status.py as well.
set (CTEST_SITE "${IMAGE_NAME}")
......@@ -64,6 +70,7 @@ set (OTB_LARGEINPUT_ROOT "") # todo
message(STATUS "CI profile : ${ci_profile}")
#The following file set the CONFIGURE_OPTIONS variable
set (ENABLE_DOXYGEN OFF)
set (CONFIGURE_OPTIONS "")
include ( "${CMAKE_CURRENT_LIST_DIR}/configure_option.cmake" )
......@@ -112,3 +119,12 @@ if ( NOT _test_rv EQUAL 0 )
endif()
ctest_submit()
if(ENABLE_DOXYGEN)
# Build the Doxygen documentation (the "Documentation" target)
ctest_build(BUILD "${CTEST_BINARY_DIRECTORY}"
TARGET Documentation
RETURN_VALUE _doxy_rv
CAPTURE_CMAKE_ERROR _doxy_error
)
endif()
......@@ -22,9 +22,33 @@
set(site_option
"opencv_INCLUDE_DIR:PATH=/usr/include
OTB_USE_SHARK:BOOL=OFF
CMAKE_C_COMPILER:STRING=clang
CMAKE_CXX_COMPILER:STRING=clang++
CMAKE_EXE_LINKER_FLAGS:STRING=-fuse-ld=lld
CMAKE_MODULE_LINKER_FLAGS:STRING=-fuse-ld=lld
CMAKE_SHARED_LINKER_FLAGS:STRING=-fuse-ld=lld")
CMAKE_SHARED_LINKER_FLAGS:STRING=-fuse-ld=lld
CMAKE_C_COMPILER_LAUNCHER:STRING=ccache
CMAKE_CXX_COMPILER_LAUNCHER:STRING=ccache
OTB_USE_SHARK:BOOL=OFF")
if(NOT ${ci_do_cookbook} EQUAL -1)
set(site_option
"${site_option}
BUILD_COOKBOOK:BOOL=ON")
endif()
if(NOT ${ci_do_doxygen} EQUAL -1)
set(site_option
"${site_option}
BUILD_DOCUMENTATION:BOOL=ON
OTB_DOXYGEN_ITK_TAGFILE:FILEPATH=${CTEST_BINARY_DIRECTORY}/InsightDoxygenDocTag-4.13.0
OTB_DOXYGEN_ITK_DOXYGEN_URL:STRING=\"https://itk.org/Doxygen413/html\"
")
set (ENABLE_DOXYGEN ON)
# The ITK Doxygen tag file needs to be patched before it can be used for OTB;
# see otb-devutils/Scripts/tagfile_fix.py for the patching script.
message(STATUS "Get resources for Doxygen build ...")
execute_process(COMMAND wget https://www.orfeo-toolbox.org/packages/archives/Doxygen/InsightDoxygenDocTag-4.13.0.gz
COMMAND gzip -d InsightDoxygenDocTag-4.13.0.gz
WORKING_DIRECTORY ${CTEST_BINARY_DIRECTORY})
endif()
......@@ -426,7 +426,7 @@ def GenerateRstForApplications(rst_dir):
tagFile.write("\tapp_" + appName + "\n")
# Write application rst
with open(rst_dir + '/Applications/app_' + appName + '.rst', 'w') as appFile:
with open(rst_dir + '/Applications/app_' + appName + '.rst', 'w',encoding='utf-8') as appFile:
appFile.write(render_application(appName, appNames))
if __name__ == "__main__":
......
......@@ -25,8 +25,8 @@
int otbSharkNormalizeLabels(int itkNotUsed(argc), char* itkNotUsed(argv) [])
{
std::vector<unsigned int> inLabels = {2, 2, 3, 20, 1};
std::vector<unsigned int> expectedDictionary = {2, 3, 20, 1};
std::vector<unsigned int> expectedLabels = {0, 0, 1, 2, 3};
std::vector<unsigned int> expectedDictionary = {1, 2, 3, 20};
std::vector<unsigned int> expectedLabels = {1, 1, 2, 3, 0};
auto newLabels = inLabels;
std::vector<unsigned int> labelDict;
......
......@@ -53,6 +53,7 @@ SharkRandomForestsMachineLearningModel<TInputValue,TOutputValue>
this->m_IsRegressionSupported = false;
this->m_IsDoPredictBatchMultiThreaded = true;
this->m_NormalizeClassLabels = true;
this->m_ComputeMargin = false;
}
......
......@@ -26,6 +26,5 @@ A more detailed description can be found on the project website:
https://gitlab.orfeo-toolbox.org/jinglada/temporalgapfilling
"
GIT_REPOSITORY https://gitlab.orfeo-toolbox.org/jinglada/temporalgapfilling.git
# Commit on develop branch which includes patches for Windows support
GIT_TAG 9d97764f0411de1a32dd5e0a90bdcdd8adfa049f
GIT_TAG master
)
......@@ -135,13 +135,18 @@ template <class T> void ListSampleToSharkVector(const T * listSample, std::vecto
}
/** Shark assumes that labels are 0 ... (nbClasses-1). This function modifies the labels contained in the input vector and returns a dictionary vector of size nbClasses which allows translating the normalised labels back to the original ones: oldLabel = dictionary[newLabel].
When we generate the image containing the probability for each class, we need to ensure that the probabilities are in the correct order with respect to the incoming labels. We therefore sort the labels before building the encoding.
*/
template <typename T> void NormalizeLabelsAndGetDictionary(std::vector<T>& labels,
std::vector<T>& dictionary)
{
std::vector<T> sorted_labels = labels;
std::sort(std::begin(sorted_labels), std::end(sorted_labels));
auto last = std::unique(std::begin(sorted_labels), std::end(sorted_labels));
sorted_labels.erase(last, std::end(sorted_labels));
std::unordered_map<T, T> dictMap;
T labelCount{0};
for(const auto& l : labels)
for(const auto& l : sorted_labels)
{
if(dictMap.find(l)==dictMap.end())
dictMap.insert({l, labelCount++});
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment