You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

svmtrain.c 12 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495
  1. #include <stdio.h>
  2. #include <stdlib.h>
  3. #include <string.h>
  4. #include <ctype.h>
  5. #include "svm.h"
  6. #include "mex.h"
  7. #include "svm_model_matlab.h"
  8. #ifdef MX_API_VER
  9. #if MX_API_VER < 0x07030000
  10. typedef int mwIndex;
  11. #endif
  12. #endif
  13. #define CMD_LEN 2048
  14. #define Malloc(type,n) (type *)malloc((n)*sizeof(type))
  15. void print_null(const char *s) {}
  16. void print_string_matlab(const char *s) {mexPrintf(s);}
// Print the svmtrain usage message (option summary) to the MATLAB console.
// Called whenever the arguments cannot be parsed or are otherwise invalid.
void exit_with_help()
{
	mexPrintf(
	"Usage: model = svmtrain(training_label_vector, training_instance_matrix, 'libsvm_options');\n"
	"libsvm_options:\n"
	"-s svm_type : set type of SVM (default 0)\n"
	"	0 -- C-SVC		(multi-class classification)\n"
	"	1 -- nu-SVC		(multi-class classification)\n"
	"	2 -- one-class SVM\n"
	"	3 -- epsilon-SVR	(regression)\n"
	"	4 -- nu-SVR		(regression)\n"
	"-t kernel_type : set type of kernel function (default 2)\n"
	"	0 -- linear: u'*v\n"
	"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
	"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
	"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
	"	4 -- precomputed kernel (kernel values in training_instance_matrix)\n"
	"-d degree : set degree in kernel function (default 3)\n"
	"-g gamma : set gamma in kernel function (default 1/num_features)\n"
	"-r coef0 : set coef0 in kernel function (default 0)\n"
	"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
	"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
	"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
	"-m cachesize : set cache memory size in MB (default 100)\n"
	"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
	"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
	"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
	"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
	"-v n : n-fold cross validation mode\n"
	"-q : quiet mode (no outputs)\n"
	);
}
// svm arguments — file-scope state shared by the helpers below.
struct svm_parameter param;	// training options; set by parse_command_line
struct svm_problem prob;	// training data; set by read_problem_dense/sparse
struct svm_model *model;	// trained model (non cross-validation path)
struct svm_node *x_space;	// backing storage for all feature nodes referenced by prob.x
int cross_validation;		// nonzero when the -v option was given
int nr_fold;			// number of folds for -v (>= 2)
  56. double do_cross_validation()
  57. {
  58. int i;
  59. int total_correct = 0;
  60. double total_error = 0;
  61. double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
  62. double *target = Malloc(double,prob.l);
  63. double retval = 0.0;
  64. svm_cross_validation(&prob,&param,nr_fold,target);
  65. if(param.svm_type == EPSILON_SVR ||
  66. param.svm_type == NU_SVR)
  67. {
  68. for(i=0;i<prob.l;i++)
  69. {
  70. double y = prob.y[i];
  71. double v = target[i];
  72. total_error += (v-y)*(v-y);
  73. sumv += v;
  74. sumy += y;
  75. sumvv += v*v;
  76. sumyy += y*y;
  77. sumvy += v*y;
  78. }
  79. mexPrintf("Cross Validation Mean squared error = %g\n",total_error/prob.l);
  80. mexPrintf("Cross Validation Squared correlation coefficient = %g\n",
  81. ((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
  82. ((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))
  83. );
  84. retval = total_error/prob.l;
  85. }
  86. else
  87. {
  88. for(i=0;i<prob.l;i++)
  89. if(target[i] == prob.y[i])
  90. ++total_correct;
  91. mexPrintf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l);
  92. retval = 100.0*total_correct/prob.l;
  93. }
  94. free(target);
  95. return retval;
  96. }
// nrhs should be 3
// Parse the option string in prhs[2] into the global `param` struct and the
// cross-validation globals, after first resetting every field to its default.
// Returns 0 on success, 1 on any parse error (caller prints usage help).
// `model_file_name` is unused in the MATLAB interface (kept for signature
// parity with the command-line tool — presumably; TODO confirm).
int parse_command_line(int nrhs, const mxArray *prhs[], char *model_file_name)
{
	int i, argc = 1;
	char cmd[CMD_LEN];
	char *argv[CMD_LEN/2];
	void (*print_func)(const char *) = print_string_matlab; // default printing to matlab display

	// default values
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;
	param.gamma = 0; // 1/num_features (filled in later by read_problem_*)
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	cross_validation = 0;

	if(nrhs <= 1)
		return 1;

	if(nrhs > 2)
	{
		// put options in argv[]
		// NOTE(review): no check that the option string fits in CMD_LEN
		// (mxGetN(prhs[2])+1 may exceed the cmd buffer) — verify upstream.
		mxGetString(prhs[2], cmd, mxGetN(prhs[2]) + 1);
		// Tokenize on spaces; argv[0] is left unused to mimic a real argv.
		if((argv[argc] = strtok(cmd, " ")) != NULL)
			while((argv[++argc] = strtok(NULL, " ")) != NULL)
				;
	}

	// parse options: each "-x" flag is followed by its value in the next
	// token, except -q which takes none (hence the i-- in its case).
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		++i;	// advance to the flag's value token
		if(i>=argc && argv[i-1][1] != 'q') // since option -q has no parameter
			return 1;
		switch(argv[i-1][1])
		{
			case 's':
				param.svm_type = atoi(argv[i]);
				break;
			case 't':
				param.kernel_type = atoi(argv[i]);
				break;
			case 'd':
				param.degree = atoi(argv[i]);
				break;
			case 'g':
				param.gamma = atof(argv[i]);
				break;
			case 'r':
				param.coef0 = atof(argv[i]);
				break;
			case 'n':
				param.nu = atof(argv[i]);
				break;
			case 'm':
				param.cache_size = atof(argv[i]);
				break;
			case 'c':
				param.C = atof(argv[i]);
				break;
			case 'e':
				param.eps = atof(argv[i]);
				break;
			case 'p':
				param.p = atof(argv[i]);
				break;
			case 'h':
				param.shrinking = atoi(argv[i]);
				break;
			case 'b':
				param.probability = atoi(argv[i]);
				break;
			case 'q':
				// Quiet mode: install the no-op printer and step back,
				// since -q consumed no value token.
				print_func = &print_null;
				i--;
				break;
			case 'v':
				cross_validation = 1;
				nr_fold = atoi(argv[i]);
				if(nr_fold < 2)
				{
					mexPrintf("n-fold cross validation: n must >= 2\n");
					return 1;
				}
				break;
			case 'w':
				// -wi weight: the class label i is embedded in the flag
				// itself (argv[i-1][2]), the weight is the value token.
				++param.nr_weight;
				param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
				param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
				param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
				param.weight[param.nr_weight-1] = atof(argv[i]);
				break;
			default:
				mexPrintf("Unknown option -%c\n", argv[i-1][1]);
				return 1;
		}
	}

	svm_set_print_string_function(print_func);
	return 0;
}
// read in a problem (in svmlight format)
// Convert a dense MATLAB matrix (one instance per ROW, column-major storage)
// into the global libsvm problem (prob, x_space). Zero-valued features are
// skipped unless the kernel is PRECOMPUTED, in which case every column is
// kept (column 1 is the sample serial number). Returns 0 on success, -1 on
// a format error.
int read_problem_dense(const mxArray *label_vec, const mxArray *instance_mat)
{
	// using size_t due to the output type of matlab functions
	size_t i, j, k, l;
	size_t elements, max_index, sc, label_vector_row_num;
	double *samples, *labels;

	prob.x = NULL;
	prob.y = NULL;
	x_space = NULL;

	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat);
	sc = mxGetN(instance_mat);	// number of features (columns)

	elements = 0;
	// number of instances
	l = mxGetM(instance_mat);
	label_vector_row_num = mxGetM(label_vec);
	prob.l = (int)l;

	if(label_vector_row_num!=l)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		return -1;
	}

	if(param.kernel_type == PRECOMPUTED)
		elements = l * (sc + 1);	// all entries kept, +1 for the -1 terminator
	else
	{
		// Count nonzero entries; samples is column-major, so element
		// (row i, col k) lives at samples[k * l + i].
		for(i = 0; i < l; i++)
		{
			for(k = 0; k < sc; k++)
				if(samples[k * l + i] != 0)
					elements++;
			// count the '-1' element
			elements++;
		}
	}

	prob.y = Malloc(double,l);
	prob.x = Malloc(struct svm_node *,l);
	x_space = Malloc(struct svm_node, elements);

	max_index = sc;
	j = 0;
	for(i = 0; i < l; i++)
	{
		prob.x[i] = &x_space[j];
		prob.y[i] = labels[i];
		for(k = 0; k < sc; k++)
		{
			if(param.kernel_type == PRECOMPUTED || samples[k * l + i] != 0)
			{
				x_space[j].index = (int)k + 1;	// libsvm indices are 1-based
				x_space[j].value = samples[k * l + i];
				j++;
			}
		}
		x_space[j++].index = -1;	// end-of-instance sentinel
	}

	// Apply the documented default gamma = 1/num_features when -g was not given.
	if(param.gamma == 0 && max_index > 0)
		param.gamma = (double)(1.0/max_index);

	if(param.kernel_type == PRECOMPUTED)
		for(i=0;i<l;i++)
		{
			// First column of a precomputed kernel matrix must hold the
			// 1-based sample serial number.
			if((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > (int)max_index)
			{
				mexPrintf("Wrong input format: sample_serial_number out of range\n");
				return -1;
			}
		}

	return 0;
}
// Convert a sparse MATLAB matrix (one instance per row) into the global
// libsvm problem. The matrix is first transposed via MATLAB so that each
// COLUMN is one instance, letting the CSC arrays (jc/ir) be walked directly.
// Returns 0 on success, -1 on error.
int read_problem_sparse(const mxArray *label_vec, const mxArray *instance_mat)
{
	mwIndex *ir, *jc, low, high, k;
	// using size_t due to the output type of matlab functions
	size_t i, j, l, elements, max_index, label_vector_row_num;
	mwSize num_samples;
	double *samples, *labels;
	mxArray *instance_mat_col; // transposed instance sparse matrix

	prob.x = NULL;
	prob.y = NULL;
	x_space = NULL;

	// transpose instance matrix
	{
		mxArray *prhs[1], *plhs[1];
		prhs[0] = mxDuplicateArray(instance_mat);
		if(mexCallMATLAB(1, plhs, 1, prhs, "transpose"))
		{
			mexPrintf("Error: cannot transpose training instance matrix\n");
			return -1;
		}
		instance_mat_col = plhs[0];
		mxDestroyArray(prhs[0]);
	}

	// each column is one instance
	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat_col);
	ir = mxGetIr(instance_mat_col);	// row index of each stored nonzero
	jc = mxGetJc(instance_mat_col);	// jc[i]..jc[i+1]-1 = nonzeros of column i

	num_samples = mxGetNzmax(instance_mat_col);

	// number of instances
	l = mxGetN(instance_mat_col);
	label_vector_row_num = mxGetM(label_vec);
	prob.l = (int) l;

	if(label_vector_row_num!=l)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		return -1;
	}

	// one node per stored nonzero plus one -1 terminator per instance
	elements = num_samples + l;
	max_index = mxGetM(instance_mat_col);	// number of features

	prob.y = Malloc(double,l);
	prob.x = Malloc(struct svm_node *,l);
	x_space = Malloc(struct svm_node, elements);

	j = 0;
	for(i=0;i<l;i++)
	{
		prob.x[i] = &x_space[j];
		prob.y[i] = labels[i];
		low = jc[i], high = jc[i+1];
		for(k=low;k<high;k++)
		{
			x_space[j].index = (int)ir[k] + 1;	// libsvm indices are 1-based
			x_space[j].value = samples[k];
			j++;
		}
		x_space[j++].index = -1;	// end-of-instance sentinel
	}

	// Apply the documented default gamma = 1/num_features when -g was not given.
	if(param.gamma == 0 && max_index > 0)
		param.gamma = (double)(1.0/max_index);

	return 0;
}
  334. static void fake_answer(int nlhs, mxArray *plhs[])
  335. {
  336. int i;
  337. for(i=0;i<nlhs;i++)
  338. plhs[i] = mxCreateDoubleMatrix(0, 0, mxREAL);
  339. }
// Interface function of matlab
// now assume prhs[0]: label prhs[1]: features
// MEX entry point: validates inputs, parses options, converts the MATLAB
// matrices to libsvm format, then either runs cross validation (returning a
// scalar) or trains a model (returning a MATLAB struct). On any error the
// outputs are filled with empty matrices via fake_answer.
void mexFunction( int nlhs, mxArray *plhs[],
		int nrhs, const mxArray *prhs[] )
{
	const char *error_msg;

	// fix random seed to have same results for each run
	// (for cross validation and probability estimation)
	srand(1);

	if(nlhs > 1)
	{
		exit_with_help();
		fake_answer(nlhs, plhs);
		return;
	}

	// Transform the input Matrix to libsvm format
	if(nrhs > 1 && nrhs < 4)
	{
		int err;

		if(!mxIsDouble(prhs[0]) || !mxIsDouble(prhs[1]))
		{
			mexPrintf("Error: label vector and instance matrix must be double\n");
			fake_answer(nlhs, plhs);
			return;
		}

		if(mxIsSparse(prhs[0]))
		{
			mexPrintf("Error: label vector should not be in sparse format\n");
			fake_answer(nlhs, plhs);
			return;
		}

		if(parse_command_line(nrhs, prhs, NULL))
		{
			exit_with_help();
			svm_destroy_param(&param);
			fake_answer(nlhs, plhs);
			return;
		}

		if(mxIsSparse(prhs[1]))
		{
			if(param.kernel_type == PRECOMPUTED)
			{
				// precomputed kernel requires dense matrix, so we make one
				mxArray *rhs[1], *lhs[1];
				rhs[0] = mxDuplicateArray(prhs[1]);
				if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
				{
					mexPrintf("Error: cannot generate a full training instance matrix\n");
					svm_destroy_param(&param);
					fake_answer(nlhs, plhs);
					return;
				}
				err = read_problem_dense(prhs[0], lhs[0]);
				mxDestroyArray(lhs[0]);
				mxDestroyArray(rhs[0]);
			}
			else
				err = read_problem_sparse(prhs[0], prhs[1]);
		}
		else
			err = read_problem_dense(prhs[0], prhs[1]);

		// svmtrain's original code
		error_msg = svm_check_parameter(&prob, &param);

		if(err || error_msg)
		{
			if (error_msg != NULL)
				mexPrintf("Error: %s\n", error_msg);
			// Release everything the read_problem_* helpers allocated.
			svm_destroy_param(&param);
			free(prob.y);
			free(prob.x);
			free(x_space);
			fake_answer(nlhs, plhs);
			return;
		}

		if(cross_validation)
		{
			// -v mode: the single output is the CV accuracy / MSE scalar.
			double *ptr;
			plhs[0] = mxCreateDoubleMatrix(1, 1, mxREAL);
			ptr = mxGetPr(plhs[0]);
			ptr[0] = do_cross_validation();
		}
		else
		{
			int nr_feat = (int)mxGetN(prhs[1]);
			// NOTE(review): this inner error_msg shadows the outer one —
			// harmless here but worth renaming.
			const char *error_msg;
			model = svm_train(&prob, &param);
			error_msg = model_to_matlab_structure(plhs, nr_feat, model);
			if(error_msg)
				mexPrintf("Error: can't convert libsvm model to matrix structure: %s\n", error_msg);
			svm_free_and_destroy_model(&model);
		}
		svm_destroy_param(&param);
		free(prob.y);
		free(prob.x);
		free(x_space);
	}
	else
	{
		exit_with_help();
		fake_answer(nlhs, plhs);
		return;
	}
}

A Python package for graph kernels, graph edit distances and graph pre-image problem.