diff --git a/examples/bayes_net_gui_ex.cpp b/examples/bayes_net_gui_ex.cpp
index b4cc9e4dd..70aefc30d 100644
--- a/examples/bayes_net_gui_ex.cpp
+++ b/examples/bayes_net_gui_ex.cpp
@@ -626,6 +626,7 @@ on_menu_help_about (
 {
     message_box("About","This application is the GUI front end to the dlib C++ Library's\n"
                 "Bayesian Network inference utilities\n\n"
+                "Version 1.1\n\n"
                 "See http://dclib.sourceforge.net for updates");
 }
 
diff --git a/examples/svm_ex.cpp b/examples/svm_ex.cpp
index 64efb8758..9bb82941f 100644
--- a/examples/svm_ex.cpp
+++ b/examples/svm_ex.cpp
@@ -144,6 +144,7 @@ int main()
     // We can also train a decision function that reports a well conditioned probability instead of just a number
     // > 0 for the +1 class and < 0 for the -1 class.  An example of doing that follows:
     probabilistic_decision_function learned_probabilistic_decision_function = svm_nu_train_prob(samples, labels, kernel_type(0.1), 0.1, 3);
+    // Now we have a function that returns the probability that a given sample is of the +1 class.
 
     // print out the number of support vectors in the resulting decision function.  (it should be the same as in the one above)
     cout << "\nnumber of support vectors in our learned_probabilistic_decision_function is "
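
The comment added to svm_ex.cpp can be illustrated with a minimal usage sketch. This is not part of the patch: it assumes the sample_type/kernel_type typedefs and the samples and labels containers set up earlier in svm_ex.cpp, and it reuses the svm_nu_train_prob() call exactly as it appears in the hunk above; those names and signatures belong to this older version of dlib and should be treated as assumptions rather than a reference for the current library.

    // Sketch only: assumes the typedefs and training data defined earlier in
    // svm_ex.cpp (sample_type, kernel_type, samples, labels).  These lines
    // could follow the hunk above inside main().
    probabilistic_decision_function learned_probabilistic_decision_function =
        svm_nu_train_prob(samples, labels, kernel_type(0.1), 0.1, 3);

    // Calling the learned object returns the probability, in the range [0,1],
    // that the given sample belongs to the +1 class.
    sample_type samp;   // hypothetical test point
    samp(0) = 3.0;
    samp(1) = 2.0;
    cout << "probability this sample is in the +1 class: "
         << learned_probabilistic_decision_function(samp) << endl;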