Skip to content

Commit b5345f5

Browse files
Gil (gf712) authored and karlnapf committed
SG_ADD refactor (#4417)
* replaced variable macro with a single macro [ci skip]
* with the new AnyParameterProperties we can clean up the macros a bit and have a single macro with four parameters
* inside the macro we still have exactly the same process
* refactored all SG_ADD macros to use ParameterProperties [ci skip]
* added SG_ADD3 for default parameters [ci skip]; also refactored the SG_ADD calls respectively
1 parent ed31a10 commit b5345f5

File tree

323 files changed

+1233
-1463
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

323 files changed

+1233
-1463
lines changed

src/shogun/base/SGObject.h

+16-19
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,8 @@
44
* Authors: Heiko Strathmann, Soeren Sonnenburg, Sergey Lisitsyn,
55
* Giovanni De Toni, Jacob Walker, Thoralf Klein, Chiyuan Zhang,
66
* Fernando Iglesias, Sanuj Sharma, Roman Votyakov, Yuyu Zhang,
7-
* Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng
7+
* Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng,
8+
* Gil Hoben
89
*/
910

1011
#ifndef __SGOBJECT_H__
@@ -56,55 +57,51 @@ template <class T> class SGStringList;
5657
#define SG_UNREF_NO_NULL(x) { if (x) { (x)->unref(); } }
5758

5859
/*******************************************************************************
59-
* Macros for registering parameters/model selection parameters
60+
* Macros for registering parameter properties
6061
******************************************************************************/
6162

6263
#ifdef _MSC_VER
6364

6465
#define VA_NARGS(...) INTERNAL_EXPAND_ARGS_PRIVATE(INTERNAL_ARGS_AUGMENTER(__VA_ARGS__))
6566
#define INTERNAL_ARGS_AUGMENTER(...) unused, __VA_ARGS__
6667
#define INTERNAL_EXPAND(x) x
67-
#define INTERNAL_EXPAND_ARGS_PRIVATE(...) INTERNAL_EXPAND(INTERNAL_GET_ARG_COUNT_PRIVATE(__VA_ARGS__, 5, 4, 3, 2, 1, 0))
68-
#define INTERNAL_GET_ARG_COUNT_PRIVATE(_0_, _1_, _2_, _3_, _4_, _5_, count, ...) count
68+
#define INTERNAL_EXPAND_ARGS_PRIVATE(...) INTERNAL_EXPAND(INTERNAL_GET_ARG_COUNT_PRIVATE(__VA_ARGS__, 4, 3, 2, 1, 0))
69+
#define INTERNAL_GET_ARG_COUNT_PRIVATE(_0_, _1_, _2_, _3_, _4_, count, ...) count
6970

7071
#else
7172

72-
#define VA_NARGS_IMPL(_1, _2, _3, _4, _5, N, ...) N
73-
#define VA_NARGS(...) VA_NARGS_IMPL(__VA_ARGS__, 5, 4, 3, 2, 1)
73+
#define VA_NARGS_IMPL(_1, _2, _3, _4, N, ...) N
74+
#define VA_NARGS(...) VA_NARGS_IMPL(__VA_ARGS__, 4, 3, 2, 1)
7475

7576
#endif
7677

7778
#define VARARG_IMPL2(base, count, ...) base##count(__VA_ARGS__)
7879
#define VARARG_IMPL(base, count, ...) VARARG_IMPL2(base, count, __VA_ARGS__)
7980
#define VARARG(base, ...) VARARG_IMPL(base, VA_NARGS(__VA_ARGS__), __VA_ARGS__)
8081

81-
#define SG_ADD4(param, name, description, ms_available) \
82+
#define SG_ADD3(param, name, description) \
8283
{ \
8384
this->m_parameters->add(param, name, description); \
8485
this->watch_param( \
85-
name, param, \
86-
AnyParameterProperties( \
87-
description, ms_available, GRADIENT_NOT_AVAILABLE)); \
88-
if (ms_available) \
89-
this->m_model_selection_parameters->add(param, name, description); \
86+
name, param, AnyParameterProperties()); \
9087
}
9188

92-
#define SG_ADD5(param, name, description, ms_available, gradient_available) \
89+
#define SG_ADD4(param, name, description, param_properties) \
9390
{ \
91+
AnyParameterProperties pprop = \
92+
AnyParameterProperties(description, param_properties); \
9493
this->m_parameters->add(param, name, description); \
95-
this->watch_param( \
96-
name, param, AnyParameterProperties( \
97-
description, ms_available, gradient_available)); \
98-
if (ms_available) \
94+
this->watch_param(name, param, pprop); \
95+
if (pprop.get_model_selection()) \
9996
this->m_model_selection_parameters->add(param, name, description); \
100-
if (gradient_available) \
97+
if (pprop.get_gradient()) \
10198
this->m_gradient_parameters->add(param, name, description); \
10299
}
103100

104101
#define SG_ADD(...) VARARG(SG_ADD, __VA_ARGS__)
105102

106103
/*******************************************************************************
107-
* End of macros for registering parameters/model selection parameters
104+
* End of macros for registering parameter properties
108105
******************************************************************************/
109106

110107
/** @brief Class SGObject is the base class of all shogun objects.

src/shogun/classifier/AveragedPerceptron.cpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -37,8 +37,8 @@ void CAveragedPerceptron::init()
3737
{
3838
max_iter = 1000;
3939
learn_rate = 0.1;
40-
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations.", MS_AVAILABLE);
41-
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", MS_AVAILABLE);
40+
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations.", ParameterProperties::HYPER);
41+
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", ParameterProperties::HYPER);
4242
}
4343

4444
bool CAveragedPerceptron::train_machine(CFeatures* data)

src/shogun/classifier/LDA.cpp

+3-3
Original file line numberDiff line numberDiff line change
@@ -49,9 +49,9 @@ void CLDA::init()
4949

5050
SG_ADD(
5151
(machine_int_t*)&m_method, "m_method",
52-
"Method used for LDA calculation", MS_NOT_AVAILABLE);
53-
SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", MS_AVAILABLE);
54-
SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", MS_NOT_AVAILABLE);
52+
"Method used for LDA calculation");
53+
SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", ParameterProperties::HYPER);
54+
SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm");
5555
}
5656

5757
CLDA::~CLDA()

src/shogun/classifier/Perceptron.cpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,8 @@ CPerceptron::CPerceptron() : CIterativeMachine<CLinearMachine>()
2424
m_initialize_hyperplane = true;
2525
SG_ADD(
2626
&m_initialize_hyperplane, "initialize_hyperplane",
27-
"Whether to initialize hyperplane.", MS_AVAILABLE);
28-
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", MS_AVAILABLE);
27+
"Whether to initialize hyperplane.", ParameterProperties::HYPER);
28+
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", ParameterProperties::HYPER);
2929
}
3030

3131
CPerceptron::~CPerceptron()

src/shogun/classifier/PluginEstimate.cpp

+5-9
Original file line numberDiff line numberDiff line change
@@ -21,18 +21,14 @@ CPluginEstimate::CPluginEstimate(float64_t pos_pseudo, float64_t neg_pseudo)
2121
pos_model(NULL), neg_model(NULL), features(NULL)
2222
{
2323
SG_ADD(
24-
&m_pos_pseudo, "pos_pseudo", "pseudo count for positive class",
25-
MS_NOT_AVAILABLE);
24+
&m_pos_pseudo, "pos_pseudo", "pseudo count for positive class");
2625
SG_ADD(
27-
&m_neg_pseudo, "neg_pseudo", "pseudo count for negative class",
28-
MS_NOT_AVAILABLE);
26+
&m_neg_pseudo, "neg_pseudo", "pseudo count for negative class");
2927
SG_ADD(
30-
&pos_model, "pos_model", "LinearHMM modelling positive class.",
31-
MS_NOT_AVAILABLE);
28+
&pos_model, "pos_model", "LinearHMM modelling positive class.");
3229
SG_ADD(
33-
&neg_model, "neg_model", "LinearHMM modelling negative class.",
34-
MS_NOT_AVAILABLE);
35-
SG_ADD(&features, "features", "String Features.", MS_NOT_AVAILABLE);
30+
&neg_model, "neg_model", "LinearHMM modelling negative class.");
31+
SG_ADD(&features, "features", "String Features.");
3632
}
3733

3834
CPluginEstimate::~CPluginEstimate()

src/shogun/classifier/mkl/MKL.cpp

+13-11
Original file line numberDiff line numberDiff line change
@@ -271,21 +271,23 @@ void CMKL::register_params()
271271
rho = 0;
272272
lp_initialized = false;
273273

274-
SG_ADD((CMachine**)&svm, "svm", "wrapper svm", MS_NOT_AVAILABLE);
275-
SG_ADD(&C_mkl, "C_mkl", "C mkl", MS_NOT_AVAILABLE);
276-
SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", MS_NOT_AVAILABLE);
277-
SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", MS_NOT_AVAILABLE);
278-
SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", MS_NOT_AVAILABLE);
274+
SG_ADD((CMachine**)&svm, "svm", "wrapper svm");
275+
SG_ADD(&C_mkl, "C_mkl", "C mkl", ParameterProperties::HYPER);
276+
SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl");
277+
SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter",
278+
ParameterProperties::HYPER);
279+
SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter",
280+
ParameterProperties::HYPER);
279281

280282
m_parameters->add_vector(&beta_local, &beta_local_size, "beta_local", "subkernel weights on L1 term of elastic net mkl");
281283
watch_param("beta_local", &beta_local, &beta_local_size);
282284

283-
SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", MS_NOT_AVAILABLE);
284-
SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", MS_NOT_AVAILABLE);
285-
SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", MS_NOT_AVAILABLE);
286-
SG_ADD(&w_gap, "w_gap", "gap between interactions", MS_NOT_AVAILABLE);
287-
SG_ADD(&rho, "rho", "objective after mkl iterations", MS_NOT_AVAILABLE);
288-
SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", MS_NOT_AVAILABLE);
285+
SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps");
286+
SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon");
287+
SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.");
288+
SG_ADD(&w_gap, "w_gap", "gap between interactions");
289+
SG_ADD(&rho, "rho", "objective after mkl iterations");
290+
SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized");
289291
// Missing: self (3rd party specific, handled in clone())
290292
}
291293

src/shogun/classifier/svm/LibLinear.cpp

+7-8
Original file line numberDiff line numberDiff line change
@@ -50,18 +50,17 @@ void CLibLinear::init()
5050
set_max_iterations();
5151
set_epsilon(1e-5);
5252

53-
SG_ADD(&C1, "C1", "C Cost constant 1.", MS_AVAILABLE);
54-
SG_ADD(&C2, "C2", "C Cost constant 2.", MS_AVAILABLE);
53+
SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER);
54+
SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER);
5555
SG_ADD(
56-
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
57-
SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE);
56+
&use_bias, "use_bias", "Indicates if bias is used.");
57+
SG_ADD(&epsilon, "epsilon", "Convergence precision.");
5858
SG_ADD(
59-
&max_iterations, "max_iterations", "Max number of iterations.",
60-
MS_NOT_AVAILABLE);
61-
SG_ADD(&m_linear_term, "linear_term", "Linear Term", MS_NOT_AVAILABLE);
59+
&max_iterations, "max_iterations", "Max number of iterations.");
60+
SG_ADD(&m_linear_term, "linear_term", "Linear Term");
6261
SG_ADD(
6362
(machine_int_t*)&liblinear_solver_type, "liblinear_solver_type",
64-
"Type of LibLinear solver.", MS_NOT_AVAILABLE);
63+
"Type of LibLinear solver.");
6564
}
6665

6766
CLibLinear::~CLibLinear()

src/shogun/classifier/svm/LibSVM.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ CLibSVM::~CLibSVM()
3636

3737
void CLibSVM::register_params()
3838
{
39-
SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type", MS_NOT_AVAILABLE);
39+
SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type");
4040
}
4141

4242
bool CLibSVM::train_machine(CFeatures* data)

src/shogun/classifier/svm/OnlineLibLinear.cpp

+3-3
Original file line numberDiff line numberDiff line change
@@ -63,10 +63,10 @@ void COnlineLibLinear::init()
6363
Cn=1;
6464
use_bias=false;
6565

66-
SG_ADD(&C1, "C1", "C Cost constant 1.", MS_AVAILABLE);
67-
SG_ADD(&C2, "C2", "C Cost constant 2.", MS_AVAILABLE);
66+
SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER);
67+
SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER);
6868
SG_ADD(
69-
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
69+
&use_bias, "use_bias", "Indicates if bias is used.");
7070

7171
PG = 0;
7272
PGmax_old = CMath::INFTY;

src/shogun/classifier/svm/OnlineSVMSGD.cpp

+10-10
Original file line numberDiff line numberDiff line change
@@ -206,17 +206,17 @@ void COnlineSVMSGD::init()
206206
loss=new CHingeLoss();
207207
SG_REF(loss);
208208

209-
SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
210-
SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
211-
SG_ADD(&lambda, "lambda", "Regularization parameter.", MS_AVAILABLE);
212-
SG_ADD(&wscale, "wscale", "W scale", MS_NOT_AVAILABLE);
213-
SG_ADD(&bscale, "bscale", "b scale", MS_NOT_AVAILABLE);
214-
SG_ADD(&epochs, "epochs", "epochs", MS_NOT_AVAILABLE);
215-
SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE);
216-
SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE);
209+
SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
210+
SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
211+
SG_ADD(&lambda, "lambda", "Regularization parameter.", ParameterProperties::HYPER);
212+
SG_ADD(&wscale, "wscale", "W scale");
213+
SG_ADD(&bscale, "bscale", "b scale");
214+
SG_ADD(&epochs, "epochs", "epochs");
215+
SG_ADD(&skip, "skip", "skip");
216+
SG_ADD(&count, "count", "count");
217217
SG_ADD(
218-
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
218+
&use_bias, "use_bias", "Indicates if bias is used.");
219219
SG_ADD(
220220
&use_regularized_bias, "use_regularized_bias",
221-
"Indicates if bias is regularized.", MS_NOT_AVAILABLE);
221+
"Indicates if bias is regularized.");
222222
}

src/shogun/classifier/svm/SGDQN.cpp

+5-5
Original file line numberDiff line numberDiff line change
@@ -226,9 +226,9 @@ void CSGDQN::init()
226226
loss=new CHingeLoss();
227227
SG_REF(loss);
228228

229-
SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
230-
SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
231-
SG_ADD(&epochs, "epochs", "epochs", MS_AVAILABLE);
232-
SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE);
233-
SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE);
229+
SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
230+
SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
231+
SG_ADD(&epochs, "epochs", "epochs", ParameterProperties::HYPER);
232+
SG_ADD(&skip, "skip", "skip");
233+
SG_ADD(&count, "count", "count");
234234
}

src/shogun/classifier/svm/SVM.cpp

+11-14
Original file line numberDiff line numberDiff line change
@@ -40,21 +40,18 @@ CSVM::~CSVM()
4040

4141
void CSVM::set_defaults(int32_t num_sv)
4242
{
43-
SG_ADD(&C1, "C1", "", MS_AVAILABLE);
44-
SG_ADD(&C2, "C2", "", MS_AVAILABLE);
45-
SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded.", MS_NOT_AVAILABLE);
46-
SG_ADD(&epsilon, "epsilon", "", MS_AVAILABLE);
43+
SG_ADD(&C1, "C1", "", ParameterProperties::HYPER);
44+
SG_ADD(&C2, "C2", "", ParameterProperties::HYPER);
45+
SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded.");
46+
SG_ADD(&epsilon, "epsilon", "", ParameterProperties::HYPER);
4747
SG_ADD(&tube_epsilon, "tube_epsilon",
48-
"Tube epsilon for support vector regression.", MS_AVAILABLE);
49-
SG_ADD(&nu, "nu", "", MS_AVAILABLE);
50-
SG_ADD(&objective, "objective", "", MS_NOT_AVAILABLE);
51-
SG_ADD(&qpsize, "qpsize", "", MS_NOT_AVAILABLE);
52-
SG_ADD(&use_shrinking, "use_shrinking", "Shrinking shall be used.",
53-
MS_NOT_AVAILABLE);
54-
SG_ADD((CSGObject**) &mkl, "mkl", "MKL object that svm optimizers need.",
55-
MS_NOT_AVAILABLE);
56-
SG_ADD(&m_linear_term, "linear_term", "Linear term in qp.",
57-
MS_NOT_AVAILABLE);
48+
"Tube epsilon for support vector regression.", ParameterProperties::HYPER);
49+
SG_ADD(&nu, "nu", "", ParameterProperties::HYPER);
50+
SG_ADD(&objective, "objective", "");
51+
SG_ADD(&qpsize, "qpsize", "");
52+
SG_ADD(&use_shrinking, "use_shrinking", "Shrinking shall be used.");
53+
SG_ADD((CSGObject**) &mkl, "mkl", "MKL object that svm optimizers need.");
54+
SG_ADD(&m_linear_term, "linear_term", "Linear term in qp.");
5855

5956
callback=NULL;
6057
mkl=NULL;

src/shogun/classifier/svm/SVMOcas.cpp

+6-8
Original file line numberDiff line numberDiff line change
@@ -345,17 +345,15 @@ void CSVMOcas::init()
345345

346346
primal_objective = 0.0;
347347

348-
SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
349-
SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
348+
SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
349+
SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
350350
SG_ADD(
351-
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
352-
SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE);
351+
&use_bias, "use_bias", "Indicates if bias is used.");
352+
SG_ADD(&epsilon, "epsilon", "Convergence precision.");
353353
SG_ADD(
354-
&bufsize, "bufsize", "Maximum number of cutting planes.",
355-
MS_NOT_AVAILABLE);
354+
&bufsize, "bufsize", "Maximum number of cutting planes.");
356355
SG_ADD(
357-
(machine_int_t*)&method, "method", "SVMOcas solver type.",
358-
MS_NOT_AVAILABLE);
356+
(machine_int_t*)&method, "method", "SVMOcas solver type.");
359357
}
360358

361359
float64_t CSVMOcas::compute_primal_objective() const

src/shogun/clustering/GMM.cpp

+1-2
Original file line numberDiff line numberDiff line change
@@ -826,6 +826,5 @@ void CGMM::register_params()
826826
//TODO serialization broken
827827
//m_parameters->add((SGVector<CSGObject*>*) &m_components, "m_components", "Mixture components");
828828
SG_ADD(
829-
&m_coefficients, "m_coefficients", "Mixture coefficients.",
830-
MS_NOT_AVAILABLE);
829+
&m_coefficients, "m_coefficients", "Mixture coefficients.");
831830
}

src/shogun/clustering/KMeansBase.cpp

+4-4
Original file line numberDiff line numberDiff line change
@@ -359,10 +359,10 @@ void CKMeansBase::init()
359359
dimensions=0;
360360
fixed_centers=false;
361361
use_kmeanspp=false;
362-
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations", MS_AVAILABLE);
363-
SG_ADD(&k, "k", "k, the number of clusters", MS_AVAILABLE);
364-
SG_ADD(&dimensions, "dimensions", "Dimensions of data", MS_NOT_AVAILABLE);
365-
SG_ADD(&R, "radiuses", "Cluster radiuses", MS_NOT_AVAILABLE);
362+
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations", ParameterProperties::HYPER);
363+
SG_ADD(&k, "k", "k, the number of clusters", ParameterProperties::HYPER);
364+
SG_ADD(&dimensions, "dimensions", "Dimensions of data");
365+
SG_ADD(&R, "radiuses", "Cluster radiuses");
366366

367367
watch_method("cluster_centers", &CKMeansBase::get_cluster_centers);
368368
}

src/shogun/clustering/KMeansMiniBatch.cpp

+1-2
Original file line numberDiff line numberDiff line change
@@ -140,8 +140,7 @@ void CKMeansMiniBatch::init_mb_params()
140140
batch_size=-1;
141141

142142
SG_ADD(
143-
&batch_size, "batch_size", "batch size for mini-batch KMeans",
144-
MS_NOT_AVAILABLE);
143+
&batch_size, "batch_size", "batch size for mini-batch KMeans");
145144
}
146145

147146
bool CKMeansMiniBatch::train_machine(CFeatures* data)

src/shogun/converter/DiffusionMaps.cpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ CDiffusionMaps::CDiffusionMaps() :
2525

2626
void CDiffusionMaps::init()
2727
{
28-
SG_ADD(&m_t, "t", "number of steps", MS_AVAILABLE);
29-
SG_ADD(&m_width, "width", "gaussian kernel width", MS_AVAILABLE);
28+
SG_ADD(&m_t, "t", "number of steps", ParameterProperties::HYPER);
29+
SG_ADD(&m_width, "width", "gaussian kernel width", ParameterProperties::HYPER);
3030
}
3131

3232
CDiffusionMaps::~CDiffusionMaps()

src/shogun/converter/EmbeddingConverter.cpp

+3-3
Original file line numberDiff line numberDiff line change
@@ -69,11 +69,11 @@ CKernel* CEmbeddingConverter::get_kernel() const
6969
void CEmbeddingConverter::init()
7070
{
7171
SG_ADD(&m_target_dim, "target_dim",
72-
"target dimensionality of preprocessor", MS_AVAILABLE);
72+
"target dimensionality of preprocessor", ParameterProperties::HYPER);
7373
SG_ADD(
7474
&m_distance, "distance", "distance to be used for embedding",
75-
MS_AVAILABLE);
75+
ParameterProperties::HYPER);
7676
SG_ADD(
77-
&m_kernel, "kernel", "kernel to be used for embedding", MS_AVAILABLE);
77+
&m_kernel, "kernel", "kernel to be used for embedding", ParameterProperties::HYPER);
7878
}
7979
}

src/shogun/converter/FactorAnalysis.cpp

+2-2
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,8 @@ CFactorAnalysis::CFactorAnalysis() :
2121

2222
void CFactorAnalysis::init()
2323
{
24-
SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", MS_NOT_AVAILABLE);
25-
SG_ADD(&m_epsilon, "epsilon", "convergence parameter", MS_NOT_AVAILABLE);
24+
SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations");
25+
SG_ADD(&m_epsilon, "epsilon", "convergence parameter");
2626
}
2727

2828
CFactorAnalysis::~CFactorAnalysis()

0 commit comments

Comments
 (0)