You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

svm.java 64 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860
  1. package libsvm;
  2. import java.io.*;
  3. import java.util.*;
  4. //
  5. // Kernel Cache
  6. //
  7. // l is the number of total data items
  8. // size is the cache size limit in bytes
  9. //
  10. class Cache {
  11. private final int l;
  12. private long size;
  13. private final class head_t
  14. {
  15. head_t prev, next; // a cicular list
  16. float[] data;
  17. int len; // data[0,len) is cached in this entry
  18. }
  19. private final head_t[] head;
  20. private head_t lru_head;
  21. Cache(int l_, long size_)
  22. {
  23. l = l_;
  24. size = size_;
  25. head = new head_t[l];
  26. for(int i=0;i<l;i++) head[i] = new head_t();
  27. size /= 4;
  28. size -= l * (16/4); // sizeof(head_t) == 16
  29. size = Math.max(size, 2* (long) l); // cache must be large enough for two columns
  30. lru_head = new head_t();
  31. lru_head.next = lru_head.prev = lru_head;
  32. }
  33. private void lru_delete(head_t h)
  34. {
  35. // delete from current location
  36. h.prev.next = h.next;
  37. h.next.prev = h.prev;
  38. }
  39. private void lru_insert(head_t h)
  40. {
  41. // insert to last position
  42. h.next = lru_head;
  43. h.prev = lru_head.prev;
  44. h.prev.next = h;
  45. h.next.prev = h;
  46. }
  47. // request data [0,len)
  48. // return some position p where [p,len) need to be filled
  49. // (p >= len if nothing needs to be filled)
  50. // java: simulate pointer using single-element array
  51. int get_data(int index, float[][] data, int len)
  52. {
  53. head_t h = head[index];
  54. if(h.len > 0) lru_delete(h);
  55. int more = len - h.len;
  56. if(more > 0)
  57. {
  58. // free old space
  59. while(size < more)
  60. {
  61. head_t old = lru_head.next;
  62. lru_delete(old);
  63. size += old.len;
  64. old.data = null;
  65. old.len = 0;
  66. }
  67. // allocate new space
  68. float[] new_data = new float[len];
  69. if(h.data != null) System.arraycopy(h.data,0,new_data,0,h.len);
  70. h.data = new_data;
  71. size -= more;
  72. do {int tmp=h.len; h.len=len; len=tmp;} while(false);
  73. }
  74. lru_insert(h);
  75. data[0] = h.data;
  76. return len;
  77. }
  78. void swap_index(int i, int j)
  79. {
  80. if(i==j) return;
  81. if(head[i].len > 0) lru_delete(head[i]);
  82. if(head[j].len > 0) lru_delete(head[j]);
  83. do {float[] tmp=head[i].data; head[i].data=head[j].data; head[j].data=tmp;} while(false);
  84. do {int tmp=head[i].len; head[i].len=head[j].len; head[j].len=tmp;} while(false);
  85. if(head[i].len > 0) lru_insert(head[i]);
  86. if(head[j].len > 0) lru_insert(head[j]);
  87. if(i>j) do {int tmp=i; i=j; j=tmp;} while(false);
  88. for(head_t h = lru_head.next; h!=lru_head; h=h.next)
  89. {
  90. if(h.len > i)
  91. {
  92. if(h.len > j)
  93. do {float tmp=h.data[i]; h.data[i]=h.data[j]; h.data[j]=tmp;} while(false);
  94. else
  95. {
  96. // give up
  97. lru_delete(h);
  98. size += h.len;
  99. h.data = null;
  100. h.len = 0;
  101. }
  102. }
  103. }
  104. }
  105. }
  106. //
  107. // Kernel evaluation
  108. //
  109. // the static method k_function is for doing single kernel evaluation
  110. // the constructor of Kernel prepares to calculate the l*l kernel matrix
  111. // the member function get_Q is for getting one column from the Q Matrix
  112. //
// Interface of the Q matrix used by the SMO solver.
abstract class QMatrix {
	// Returns one column of Q: entries Q[column][0..len).
	abstract float[] get_Q(int column, int len);
	// Returns the diagonal of Q (Q[i][i] for all i).
	abstract double[] get_QD();
	// Swaps data items i and j in the underlying representation.
	abstract void swap_index(int i, int j);
};
// Kernel evaluation.
//
// The static method k_function performs a single kernel evaluation;
// the constructor prepares to compute the l*l kernel matrix, and
// kernel_function(i,j) evaluates K(x[i], x[j]) on the stored data.
abstract class Kernel extends QMatrix {
	private svm_node[][] x;              // training vectors (sparse rows)
	private final double[] x_square;     // precomputed dot(x[i],x[i]); RBF only, else null
	// svm_parameter
	private final int kernel_type;
	private final int degree;
	private final double gamma;
	private final double coef0;
	abstract float[] get_Q(int column, int len);
	abstract double[] get_QD();
	// Keep the stored data consistent when the solver permutes items.
	void swap_index(int i, int j)
	{
		do {svm_node[] tmp=x[i]; x[i]=x[j]; x[j]=tmp;} while(false);
		if(x_square != null) do {double tmp=x_square[i]; x_square[i]=x_square[j]; x_square[j]=tmp;} while(false);
	}
	// Integer power by repeated squaring: returns base^times.
	private static double powi(double base, int times)
	{
		double tmp = base, ret = 1.0;
		for(int t=times; t>0; t/=2)
		{
			if(t%2==1) ret*=tmp;
			tmp = tmp * tmp;
		}
		return ret;
	}
	// K(x[i], x[j]) for the configured kernel type.
	double kernel_function(int i, int j)
	{
		switch(kernel_type)
		{
			case svm_parameter.LINEAR:
				return dot(x[i],x[j]);
			case svm_parameter.POLY:
				return powi(gamma*dot(x[i],x[j])+coef0,degree);
			case svm_parameter.RBF:
				// ||a-b||^2 = a.a + b.b - 2 a.b, using the cached squares
				return Math.exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j])));
			case svm_parameter.SIGMOID:
				return Math.tanh(gamma*dot(x[i],x[j])+coef0);
			case svm_parameter.PRECOMPUTED:
				// x[j][0].value holds the 1-based column index into the
				// precomputed kernel row x[i]
				return x[i][(int)(x[j][0].value)].value;
			default:
				return 0; // java
		}
	}
	// l is the number of data items; x_ is cloned shallowly (rows shared).
	Kernel(int l, svm_node[][] x_, svm_parameter param)
	{
		this.kernel_type = param.kernel_type;
		this.degree = param.degree;
		this.gamma = param.gamma;
		this.coef0 = param.coef0;
		x = (svm_node[][])x_.clone();
		if(kernel_type == svm_parameter.RBF)
		{
			// Cache squared norms so each RBF evaluation needs one dot product.
			x_square = new double[l];
			for(int i=0;i<l;i++)
				x_square[i] = dot(x[i],x[i]);
		}
		else x_square = null;
	}
	// Sparse dot product: merge-walk the two index-sorted node arrays.
	static double dot(svm_node[] x, svm_node[] y)
	{
		double sum = 0;
		int xlen = x.length;
		int ylen = y.length;
		int i = 0;
		int j = 0;
		while(i < xlen && j < ylen)
		{
			if(x[i].index == y[j].index)
				sum += x[i++].value * y[j++].value;
			else
			{
				// advance whichever side is behind
				if(x[i].index > y[j].index)
					++j;
				else
					++i;
			}
		}
		return sum;
	}
	// Single kernel evaluation between two arbitrary vectors (used at
	// prediction time, when no data is stored in the object).
	static double k_function(svm_node[] x, svm_node[] y,
			svm_parameter param)
	{
		switch(param.kernel_type)
		{
			case svm_parameter.LINEAR:
				return dot(x,y);
			case svm_parameter.POLY:
				return powi(param.gamma*dot(x,y)+param.coef0,param.degree);
			case svm_parameter.RBF:
			{
				// Squared distance via a sparse merge; indices present in
				// only one vector contribute that value squared.
				double sum = 0;
				int xlen = x.length;
				int ylen = y.length;
				int i = 0;
				int j = 0;
				while(i < xlen && j < ylen)
				{
					if(x[i].index == y[j].index)
					{
						double d = x[i++].value - y[j++].value;
						sum += d*d;
					}
					else if(x[i].index > y[j].index)
					{
						sum += y[j].value * y[j].value;
						++j;
					}
					else
					{
						sum += x[i].value * x[i].value;
						++i;
					}
				}
				// leftover tail of either vector
				while(i < xlen)
				{
					sum += x[i].value * x[i].value;
					++i;
				}
				while(j < ylen)
				{
					sum += y[j].value * y[j].value;
					++j;
				}
				return Math.exp(-param.gamma*sum);
			}
			case svm_parameter.SIGMOID:
				return Math.tanh(param.gamma*dot(x,y)+param.coef0);
			case svm_parameter.PRECOMPUTED:
				return x[(int)(y[0].value)].value;
			default:
				return 0; // java
		}
	}
}
  252. // An SMO algorithm in Fan et al., JMLR 6(2005), p. 1889--1918
  253. // Solves:
  254. //
  255. // min 0.5(\alpha^T Q \alpha) + p^T \alpha
  256. //
  257. // y^T \alpha = \delta
  258. // y_i = +1 or -1
  259. // 0 <= alpha_i <= Cp for y_i = 1
  260. // 0 <= alpha_i <= Cn for y_i = -1
  261. //
  262. // Given:
  263. //
  264. // Q, p, y, Cp, Cn, and an initial feasible point \alpha
  265. // l is the size of vectors and matrices
  266. // eps is the stopping tolerance
  267. //
  268. // solution will be put in \alpha, objective value will be put in obj
  269. //
// Generic SMO solver (Fan et al., JMLR 6(2005), working set selection
// using second order information). See the comment block above for the
// optimization problem being solved.
class Solver {
	int active_size;      // first active_size items are not shrunk away
	byte[] y;             // labels in {+1,-1}
	double[] G;           // gradient of objective function
	static final byte LOWER_BOUND = 0;
	static final byte UPPER_BOUND = 1;
	static final byte FREE = 2;
	byte[] alpha_status;  // LOWER_BOUND, UPPER_BOUND, FREE
	double[] alpha;
	QMatrix Q;
	double[] QD;          // diagonal of Q
	double eps;           // stopping tolerance
	double Cp,Cn;         // upper bounds for y=+1 / y=-1
	double[] p;           // linear term of the objective
	int[] active_set;     // permutation mapping active position -> original index
	double[] G_bar;       // gradient, if we treat free variables as 0
	int l;
	boolean unshrink;     // XXX: set once shrinking has been undone near convergence
	static final double INF = java.lang.Double.POSITIVE_INFINITY;
	// Upper bound C for variable i, depending on its label.
	double get_C(int i)
	{
		return (y[i] > 0)? Cp : Cn;
	}
	// Reclassify alpha[i] as at-upper-bound, at-lower-bound, or free.
	void update_alpha_status(int i)
	{
		if(alpha[i] >= get_C(i))
			alpha_status[i] = UPPER_BOUND;
		else if(alpha[i] <= 0)
			alpha_status[i] = LOWER_BOUND;
		else alpha_status[i] = FREE;
	}
	boolean is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; }
	boolean is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; }
	boolean is_free(int i) { return alpha_status[i] == FREE; }
	// java: information about solution except alpha,
	// because we cannot return multiple values otherwise...
	static class SolutionInfo {
		double obj;           // objective value at the solution
		double rho;           // bias term
		double upper_bound_p; // Cp actually used
		double upper_bound_n; // Cn actually used
		double r;             // for Solver_NU
	}
	// Swap items i and j in every per-item array (used by shrinking).
	void swap_index(int i, int j)
	{
		Q.swap_index(i,j);
		do {byte tmp=y[i]; y[i]=y[j]; y[j]=tmp;} while(false);
		do {double tmp=G[i]; G[i]=G[j]; G[j]=tmp;} while(false);
		do {byte tmp=alpha_status[i]; alpha_status[i]=alpha_status[j]; alpha_status[j]=tmp;} while(false);
		do {double tmp=alpha[i]; alpha[i]=alpha[j]; alpha[j]=tmp;} while(false);
		do {double tmp=p[i]; p[i]=p[j]; p[j]=tmp;} while(false);
		do {int tmp=active_set[i]; active_set[i]=active_set[j]; active_set[j]=tmp;} while(false);
		do {double tmp=G_bar[i]; G_bar[i]=G_bar[j]; G_bar[j]=tmp;} while(false);
	}
	void reconstruct_gradient()
	{
		// reconstruct inactive elements of G from G_bar and free variables
		if(active_size == l) return;
		int i,j;
		int nr_free = 0;
		for(j=active_size;j<l;j++)
			G[j] = G_bar[j] + p[j];
		for(j=0;j<active_size;j++)
			if(is_free(j))
				nr_free++;
		if(2*nr_free < active_size)
			svm.info("\nWARNING: using -h 0 may be faster\n");
		// Choose the cheaper of two equivalent reconstruction loops based
		// on how many kernel entries each would touch.
		if (nr_free*l > 2*active_size*(l-active_size))
		{
			for(i=active_size;i<l;i++)
			{
				float[] Q_i = Q.get_Q(i,active_size);
				for(j=0;j<active_size;j++)
					if(is_free(j))
						G[i] += alpha[j] * Q_i[j];
			}
		}
		else
		{
			for(i=0;i<active_size;i++)
				if(is_free(i))
				{
					float[] Q_i = Q.get_Q(i,l);
					double alpha_i = alpha[i];
					for(j=active_size;j<l;j++)
						G[j] += alpha_i * Q_i[j];
				}
		}
	}
	// Main SMO loop. On return the solution is written into alpha_ and si.
	void Solve(int l, QMatrix Q, double[] p_, byte[] y_,
			double[] alpha_, double Cp, double Cn, double eps, SolutionInfo si, int shrinking)
	{
		this.l = l;
		this.Q = Q;
		QD = Q.get_QD();
		p = (double[])p_.clone();
		y = (byte[])y_.clone();
		alpha = (double[])alpha_.clone();
		this.Cp = Cp;
		this.Cn = Cn;
		this.eps = eps;
		this.unshrink = false;
		// initialize alpha_status
		{
			alpha_status = new byte[l];
			for(int i=0;i<l;i++)
				update_alpha_status(i);
		}
		// initialize active set (for shrinking)
		{
			active_set = new int[l];
			for(int i=0;i<l;i++)
				active_set[i] = i;
			active_size = l;
		}
		// initialize gradient
		{
			G = new double[l];
			G_bar = new double[l];
			int i;
			for(i=0;i<l;i++)
			{
				G[i] = p[i];
				G_bar[i] = 0;
			}
			for(i=0;i<l;i++)
				if(!is_lower_bound(i))
				{
					float[] Q_i = Q.get_Q(i,l);
					double alpha_i = alpha[i];
					int j;
					for(j=0;j<l;j++)
						G[j] += alpha_i*Q_i[j];
					if(is_upper_bound(i))
						for(j=0;j<l;j++)
							G_bar[j] += get_C(i) * Q_i[j];
				}
		}
		// optimization step
		int iter = 0;
		// Guard 100*l against int overflow before taking the max.
		int max_iter = Math.max(10000000, l>Integer.MAX_VALUE/100 ? Integer.MAX_VALUE : 100*l);
		int counter = Math.min(l,1000)+1;
		int[] working_set = new int[2];
		while(iter < max_iter)
		{
			// show progress and do shrinking
			if(--counter == 0)
			{
				counter = Math.min(l,1000);
				if(shrinking!=0) do_shrinking();
				svm.info(".");
			}
			if(select_working_set(working_set)!=0)
			{
				// reconstruct the whole gradient
				reconstruct_gradient();
				// reset active set size and check
				active_size = l;
				svm.info("*");
				// Optimal on the full set -> done; otherwise keep going.
				if(select_working_set(working_set)!=0)
					break;
				else
					counter = 1; // do shrinking next iteration
			}
			int i = working_set[0];
			int j = working_set[1];
			++iter;
			// update alpha[i] and alpha[j], handle bounds carefully
			float[] Q_i = Q.get_Q(i,active_size);
			float[] Q_j = Q.get_Q(j,active_size);
			double C_i = get_C(i);
			double C_j = get_C(j);
			double old_alpha_i = alpha[i];
			double old_alpha_j = alpha[j];
			if(y[i]!=y[j])
			{
				// Opposite labels: alpha_i - alpha_j is conserved.
				double quad_coef = QD[i]+QD[j]+2*Q_i[j];
				if (quad_coef <= 0)
					quad_coef = 1e-12;
				double delta = (-G[i]-G[j])/quad_coef;
				double diff = alpha[i] - alpha[j];
				alpha[i] += delta;
				alpha[j] += delta;
				// Clip the step to the feasible box.
				if(diff > 0)
				{
					if(alpha[j] < 0)
					{
						alpha[j] = 0;
						alpha[i] = diff;
					}
				}
				else
				{
					if(alpha[i] < 0)
					{
						alpha[i] = 0;
						alpha[j] = -diff;
					}
				}
				if(diff > C_i - C_j)
				{
					if(alpha[i] > C_i)
					{
						alpha[i] = C_i;
						alpha[j] = C_i - diff;
					}
				}
				else
				{
					if(alpha[j] > C_j)
					{
						alpha[j] = C_j;
						alpha[i] = C_j + diff;
					}
				}
			}
			else
			{
				// Same labels: alpha_i + alpha_j is conserved.
				double quad_coef = QD[i]+QD[j]-2*Q_i[j];
				if (quad_coef <= 0)
					quad_coef = 1e-12;
				double delta = (G[i]-G[j])/quad_coef;
				double sum = alpha[i] + alpha[j];
				alpha[i] -= delta;
				alpha[j] += delta;
				// Clip the step to the feasible box.
				if(sum > C_i)
				{
					if(alpha[i] > C_i)
					{
						alpha[i] = C_i;
						alpha[j] = sum - C_i;
					}
				}
				else
				{
					if(alpha[j] < 0)
					{
						alpha[j] = 0;
						alpha[i] = sum;
					}
				}
				if(sum > C_j)
				{
					if(alpha[j] > C_j)
					{
						alpha[j] = C_j;
						alpha[i] = sum - C_j;
					}
				}
				else
				{
					if(alpha[i] < 0)
					{
						alpha[i] = 0;
						alpha[j] = sum;
					}
				}
			}
			// update G
			double delta_alpha_i = alpha[i] - old_alpha_i;
			double delta_alpha_j = alpha[j] - old_alpha_j;
			for(int k=0;k<active_size;k++)
			{
				G[k] += Q_i[k]*delta_alpha_i + Q_j[k]*delta_alpha_j;
			}
			// update alpha_status and G_bar
			{
				boolean ui = is_upper_bound(i);
				boolean uj = is_upper_bound(j);
				update_alpha_status(i);
				update_alpha_status(j);
				int k;
				// G_bar only changes when a variable crosses the upper bound.
				if(ui != is_upper_bound(i))
				{
					Q_i = Q.get_Q(i,l);
					if(ui)
						for(k=0;k<l;k++)
							G_bar[k] -= C_i * Q_i[k];
					else
						for(k=0;k<l;k++)
							G_bar[k] += C_i * Q_i[k];
				}
				if(uj != is_upper_bound(j))
				{
					Q_j = Q.get_Q(j,l);
					if(uj)
						for(k=0;k<l;k++)
							G_bar[k] -= C_j * Q_j[k];
					else
						for(k=0;k<l;k++)
							G_bar[k] += C_j * Q_j[k];
				}
			}
		}
		if(iter >= max_iter)
		{
			if(active_size < l)
			{
				// reconstruct the whole gradient to calculate objective value
				reconstruct_gradient();
				active_size = l;
				svm.info("*");
			}
			System.err.print("\nWARNING: reaching max number of iterations\n");
		}
		// calculate rho
		si.rho = calculate_rho();
		// calculate objective value
		{
			double v = 0;
			int i;
			for(i=0;i<l;i++)
				v += alpha[i] * (G[i] + p[i]);
			si.obj = v/2;
		}
		// put back the solution (undo the active-set permutation)
		{
			for(int i=0;i<l;i++)
				alpha_[active_set[i]] = alpha[i];
		}
		si.upper_bound_p = Cp;
		si.upper_bound_n = Cn;
		svm.info("\noptimization finished, #iter = "+iter+"\n");
	}
	// return 1 if already optimal, return 0 otherwise
	int select_working_set(int[] working_set)
	{
		// return i,j such that
		// i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
		// j: mimimizes the decrease of obj value
		//    (if quadratic coefficeint <= 0, replace it with tau)
		//    -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
		double Gmax = -INF;
		double Gmax2 = -INF;
		int Gmax_idx = -1;
		int Gmin_idx = -1;
		double obj_diff_min = INF;
		// First pass: pick i with maximal violation.
		for(int t=0;t<active_size;t++)
			if(y[t]==+1)
			{
				if(!is_upper_bound(t))
					if(-G[t] >= Gmax)
					{
						Gmax = -G[t];
						Gmax_idx = t;
					}
			}
			else
			{
				if(!is_lower_bound(t))
					if(G[t] >= Gmax)
					{
						Gmax = G[t];
						Gmax_idx = t;
					}
			}
		int i = Gmax_idx;
		float[] Q_i = null;
		if(i != -1) // null Q_i not accessed: Gmax=-INF if i=-1
			Q_i = Q.get_Q(i,active_size);
		// Second pass: pick j minimizing the second-order objective decrease.
		for(int j=0;j<active_size;j++)
		{
			if(y[j]==+1)
			{
				if (!is_lower_bound(j))
				{
					double grad_diff=Gmax+G[j];
					if (G[j] >= Gmax2)
						Gmax2 = G[j];
					if (grad_diff > 0)
					{
						double obj_diff;
						double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j];
						if (quad_coef > 0)
							obj_diff = -(grad_diff*grad_diff)/quad_coef;
						else
							obj_diff = -(grad_diff*grad_diff)/1e-12;
						if (obj_diff <= obj_diff_min)
						{
							Gmin_idx=j;
							obj_diff_min = obj_diff;
						}
					}
				}
			}
			else
			{
				if (!is_upper_bound(j))
				{
					double grad_diff= Gmax-G[j];
					if (-G[j] >= Gmax2)
						Gmax2 = -G[j];
					if (grad_diff > 0)
					{
						double obj_diff;
						double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j];
						if (quad_coef > 0)
							obj_diff = -(grad_diff*grad_diff)/quad_coef;
						else
							obj_diff = -(grad_diff*grad_diff)/1e-12;
						if (obj_diff <= obj_diff_min)
						{
							Gmin_idx=j;
							obj_diff_min = obj_diff;
						}
					}
				}
			}
		}
		// Stop when the maximal violating pair satisfies the tolerance.
		if(Gmax+Gmax2 < eps || Gmin_idx == -1)
			return 1;
		working_set[0] = Gmax_idx;
		working_set[1] = Gmin_idx;
		return 0;
	}
	// Whether variable i can be shrunk out of the active set.
	private boolean be_shrunk(int i, double Gmax1, double Gmax2)
	{
		if(is_upper_bound(i))
		{
			if(y[i]==+1)
				return(-G[i] > Gmax1);
			else
				return(-G[i] > Gmax2);
		}
		else if(is_lower_bound(i))
		{
			if(y[i]==+1)
				return(G[i] > Gmax2);
			else
				return(G[i] > Gmax1);
		}
		else
			return(false);
	}
	// Shrink the active set by moving shrinkable variables to the end.
	void do_shrinking()
	{
		int i;
		double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) }
		double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) }
		// find maximal violating pair first
		for(i=0;i<active_size;i++)
		{
			if(y[i]==+1)
			{
				if(!is_upper_bound(i))
				{
					if(-G[i] >= Gmax1)
						Gmax1 = -G[i];
				}
				if(!is_lower_bound(i))
				{
					if(G[i] >= Gmax2)
						Gmax2 = G[i];
				}
			}
			else
			{
				if(!is_upper_bound(i))
				{
					if(-G[i] >= Gmax2)
						Gmax2 = -G[i];
				}
				if(!is_lower_bound(i))
				{
					if(G[i] >= Gmax1)
						Gmax1 = G[i];
				}
			}
		}
		// Near convergence, undo all shrinking once for a final exact pass.
		if(unshrink == false && Gmax1 + Gmax2 <= eps*10)
		{
			unshrink = true;
			reconstruct_gradient();
			active_size = l;
		}
		for(i=0;i<active_size;i++)
			if (be_shrunk(i, Gmax1, Gmax2))
			{
				active_size--;
				// Find a non-shrinkable item at the tail to swap into slot i.
				while (active_size > i)
				{
					if (!be_shrunk(active_size, Gmax1, Gmax2))
					{
						swap_index(i,active_size);
						break;
					}
					active_size--;
				}
			}
	}
	// Bias term: average y*G over free variables, or the midpoint of the
	// feasible interval when no variable is free.
	double calculate_rho()
	{
		double r;
		int nr_free = 0;
		double ub = INF, lb = -INF, sum_free = 0;
		for(int i=0;i<active_size;i++)
		{
			double yG = y[i]*G[i];
			if(is_lower_bound(i))
			{
				if(y[i] > 0)
					ub = Math.min(ub,yG);
				else
					lb = Math.max(lb,yG);
			}
			else if(is_upper_bound(i))
			{
				if(y[i] < 0)
					ub = Math.min(ub,yG);
				else
					lb = Math.max(lb,yG);
			}
			else
			{
				++nr_free;
				sum_free += yG;
			}
		}
		if(nr_free>0)
			r = sum_free/nr_free;
		else
			r = (ub+lb)/2;
		return r;
	}
}
  795. //
  796. // Solver for nu-svm classification and regression
  797. //
  798. // additional constraint: e^T \alpha = constant
  799. //
// SMO solver specialized for nu-SVM formulations. It inherits the main
// optimization loop from Solver but overrides working-set selection,
// shrinking, and rho computation, because the extra equality constraint
// e^T alpha = constant forces the selected pair (i,j) to share a label.
final class Solver_NU extends Solver
{
	private SolutionInfo si;	// kept so calculate_rho() can report r back to the caller

	void Solve(int l, QMatrix Q, double[] p, byte[] y,
		   double[] alpha, double Cp, double Cn, double eps,
		   SolutionInfo si, int shrinking)
	{
		this.si = si;
		super.Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking);
	}

	// return 1 if already optimal, return 0 otherwise
	int select_working_set(int[] working_set)
	{
		// return i,j such that y_i = y_j and
		// i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
		// j: minimizes the decrease of obj value
		//    (if quadratic coefficient <= 0, replace it with tau)
		//    -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)

		// Unlike Solver, each label group (+1 / -1) tracks its own
		// maximal-violation candidate, since i and j must share a label.
		double Gmaxp = -INF;
		double Gmaxp2 = -INF;
		int Gmaxp_idx = -1;

		double Gmaxn = -INF;
		double Gmaxn2 = -INF;
		int Gmaxn_idx = -1;

		int Gmin_idx = -1;
		double obj_diff_min = INF;

		// first pass: pick the worst violator within each label group
		for(int t=0;t<active_size;t++)
			if(y[t]==+1)
			{
				if(!is_upper_bound(t))
					if(-G[t] >= Gmaxp)
					{
						Gmaxp = -G[t];
						Gmaxp_idx = t;
					}
			}
			else
			{
				if(!is_lower_bound(t))
					if(G[t] >= Gmaxn)
					{
						Gmaxn = G[t];
						Gmaxn_idx = t;
					}
			}

		int ip = Gmaxp_idx;
		int in = Gmaxn_idx;
		float[] Q_ip = null;
		float[] Q_in = null;
		if(ip != -1) // null Q_ip not accessed: Gmaxp=-INF if ip=-1
			Q_ip = Q.get_Q(ip,active_size);
		if(in != -1)
			Q_in = Q.get_Q(in,active_size);

		// second pass: within the same label group as each candidate i,
		// pick the j giving the largest objective decrease
		for(int j=0;j<active_size;j++)
		{
			if(y[j]==+1)
			{
				if (!is_lower_bound(j))
				{
					double grad_diff=Gmaxp+G[j];
					if (G[j] >= Gmaxp2)
						Gmaxp2 = G[j];
					if (grad_diff > 0)
					{
						double obj_diff;
						double quad_coef = QD[ip]+QD[j]-2*Q_ip[j];
						if (quad_coef > 0)
							obj_diff = -(grad_diff*grad_diff)/quad_coef;
						else
							// non-PD direction: fall back to tau
							obj_diff = -(grad_diff*grad_diff)/1e-12;

						if (obj_diff <= obj_diff_min)
						{
							Gmin_idx=j;
							obj_diff_min = obj_diff;
						}
					}
				}
			}
			else
			{
				if (!is_upper_bound(j))
				{
					double grad_diff=Gmaxn-G[j];
					if (-G[j] >= Gmaxn2)
						Gmaxn2 = -G[j];
					if (grad_diff > 0)
					{
						double obj_diff;
						double quad_coef = QD[in]+QD[j]-2*Q_in[j];
						if (quad_coef > 0)
							obj_diff = -(grad_diff*grad_diff)/quad_coef;
						else
							obj_diff = -(grad_diff*grad_diff)/1e-12;

						if (obj_diff <= obj_diff_min)
						{
							Gmin_idx=j;
							obj_diff_min = obj_diff;
						}
					}
				}
			}
		}

		// optimal when the worse of the two per-label violations drops below eps
		if(Math.max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps || Gmin_idx == -1)
			return 1;

		// i must come from the same label group as the chosen j
		if(y[Gmin_idx] == +1)
			working_set[0] = Gmaxp_idx;
		else
			working_set[0] = Gmaxn_idx;
		working_set[1] = Gmin_idx;

		return 0;
	}

	// Decide whether variable i can be shrunk out of the active set,
	// using the per-label violation bounds computed in do_shrinking().
	private boolean be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4)
	{
		if(is_upper_bound(i))
		{
			if(y[i]==+1)
				return(-G[i] > Gmax1);
			else
				return(-G[i] > Gmax4);
		}
		else if(is_lower_bound(i))
		{
			if(y[i]==+1)
				return(G[i] > Gmax2);
			else
				return(G[i] > Gmax3);
		}
		else
			return(false);	// free variables are never shrunk
	}

	void do_shrinking()
	{
		double Gmax1 = -INF;	// max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) }
		double Gmax2 = -INF;	// max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) }
		double Gmax3 = -INF;	// max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) }
		double Gmax4 = -INF;	// max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) }

		// find maximal violating pair first
		int i;
		for(i=0;i<active_size;i++)
		{
			if(!is_upper_bound(i))
			{
				if(y[i]==+1)
				{
					if(-G[i] > Gmax1) Gmax1 = -G[i];
				}
				else	if(-G[i] > Gmax4) Gmax4 = -G[i];
			}
			if(!is_lower_bound(i))
			{
				if(y[i]==+1)
				{
					if(G[i] > Gmax2) Gmax2 = G[i];
				}
				else	if(G[i] > Gmax3) Gmax3 = G[i];
			}
		}

		// near convergence: undo shrinking once, rebuild the full gradient
		if(unshrink == false && Math.max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10)
		{
			unshrink = true;
			reconstruct_gradient();
			active_size = l;
		}

		// compact the active set: swap each shrinkable i with a
		// non-shrinkable element from the tail
		for(i=0;i<active_size;i++)
			if (be_shrunk(i, Gmax1, Gmax2, Gmax3, Gmax4))
			{
				active_size--;
				while (active_size > i)
				{
					if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4))
					{
						swap_index(i,active_size);
						break;
					}
					active_size--;
				}
			}
	}

	// For nu-SVM the two label groups yield separate offsets r1, r2;
	// their difference gives rho and their average is reported via si.r.
	double calculate_rho()
	{
		int nr_free1 = 0,nr_free2 = 0;
		double ub1 = INF, ub2 = INF;
		double lb1 = -INF, lb2 = -INF;
		double sum_free1 = 0, sum_free2 = 0;

		for(int i=0;i<active_size;i++)
		{
			if(y[i]==+1)
			{
				if(is_lower_bound(i))
					ub1 = Math.min(ub1,G[i]);
				else if(is_upper_bound(i))
					lb1 = Math.max(lb1,G[i]);
				else
				{
					++nr_free1;
					sum_free1 += G[i];
				}
			}
			else
			{
				if(is_lower_bound(i))
					ub2 = Math.min(ub2,G[i]);
				else if(is_upper_bound(i))
					lb2 = Math.max(lb2,G[i]);
				else
				{
					++nr_free2;
					sum_free2 += G[i];
				}
			}
		}

		double r1,r2;
		if(nr_free1 > 0)
			r1 = sum_free1/nr_free1;
		else
			r1 = (ub1+lb1)/2;

		if(nr_free2 > 0)
			r2 = sum_free2/nr_free2;
		else
			r2 = (ub2+lb2)/2;

		si.r = (r1+r2)/2;
		return (r1-r2)/2;
	}
}
  1024. //
  1025. // Q matrices for various formulations
  1026. //
  1027. class SVC_Q extends Kernel
  1028. {
  1029. private final byte[] y;
  1030. private final Cache cache;
  1031. private final double[] QD;
  1032. SVC_Q(svm_problem prob, svm_parameter param, byte[] y_)
  1033. {
  1034. super(prob.l, prob.x, param);
  1035. y = (byte[])y_.clone();
  1036. cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
  1037. QD = new double[prob.l];
  1038. for(int i=0;i<prob.l;i++)
  1039. QD[i] = kernel_function(i,i);
  1040. }
  1041. float[] get_Q(int i, int len)
  1042. {
  1043. float[][] data = new float[1][];
  1044. int start, j;
  1045. if((start = cache.get_data(i,data,len)) < len)
  1046. {
  1047. for(j=start;j<len;j++)
  1048. data[0][j] = (float)(y[i]*y[j]*kernel_function(i,j));
  1049. }
  1050. return data[0];
  1051. }
  1052. double[] get_QD()
  1053. {
  1054. return QD;
  1055. }
  1056. void swap_index(int i, int j)
  1057. {
  1058. cache.swap_index(i,j);
  1059. super.swap_index(i,j);
  1060. do {byte tmp=y[i]; y[i]=y[j]; y[j]=tmp;} while(false);
  1061. do {double tmp=QD[i]; QD[i]=QD[j]; QD[j]=tmp;} while(false);
  1062. }
  1063. }
  1064. class ONE_CLASS_Q extends Kernel
  1065. {
  1066. private final Cache cache;
  1067. private final double[] QD;
  1068. ONE_CLASS_Q(svm_problem prob, svm_parameter param)
  1069. {
  1070. super(prob.l, prob.x, param);
  1071. cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
  1072. QD = new double[prob.l];
  1073. for(int i=0;i<prob.l;i++)
  1074. QD[i] = kernel_function(i,i);
  1075. }
  1076. float[] get_Q(int i, int len)
  1077. {
  1078. float[][] data = new float[1][];
  1079. int start, j;
  1080. if((start = cache.get_data(i,data,len)) < len)
  1081. {
  1082. for(j=start;j<len;j++)
  1083. data[0][j] = (float)kernel_function(i,j);
  1084. }
  1085. return data[0];
  1086. }
  1087. double[] get_QD()
  1088. {
  1089. return QD;
  1090. }
  1091. void swap_index(int i, int j)
  1092. {
  1093. cache.swap_index(i,j);
  1094. super.swap_index(i,j);
  1095. do {double tmp=QD[i]; QD[i]=QD[j]; QD[j]=tmp;} while(false);
  1096. }
  1097. }
// Q matrix for epsilon-SVR / nu-SVR. The dual has 2*l variables
// (alpha_i in [0..l), alpha_i^* in [l..2l)); only the underlying l x l
// kernel is cached, and signs/permutation are applied on the fly.
class SVR_Q extends Kernel
{
	private final int l;
	private final Cache cache;
	private final byte[] sign;	// +1 for slot k, -1 for slot k+l
	private final int[] index;	// expanded index -> original data point
	private int next_buffer;
	private float[][] buffer;	// two rotating output rows, so a row returned by the previous get_Q call stays valid
	private final double[] QD;

	SVR_Q(svm_problem prob, svm_parameter param)
	{
		super(prob.l, prob.x, param);
		l = prob.l;
		cache = new Cache(l,(long)(param.cache_size*(1<<20)));
		QD = new double[2*l];
		sign = new byte[2*l];
		index = new int[2*l];
		for(int k=0;k<l;k++)
		{
			sign[k] = 1;
			sign[k+l] = -1;
			index[k] = k;
			index[k+l] = k;
			QD[k] = kernel_function(k,k);
			QD[k+l] = QD[k];	// diagonal is sign-independent (sign^2 = 1)
		}
		buffer = new float[2][2*l];
		next_buffer = 0;
	}

	// Only the bookkeeping arrays are swapped; the cached kernel stays
	// in original data order and is addressed through index[].
	void swap_index(int i, int j)
	{
		do {byte tmp=sign[i]; sign[i]=sign[j]; sign[j]=tmp;} while(false);
		do {int tmp=index[i]; index[i]=index[j]; index[j]=tmp;} while(false);
		do {double tmp=QD[i]; QD[i]=QD[j]; QD[j]=tmp;} while(false);
	}

	float[] get_Q(int i, int len)
	{
		float[][] data = new float[1][];
		int j, real_i = index[i];
		// always fetch/complete the full unsigned kernel row for the
		// underlying data point
		if(cache.get_data(real_i,data,l) < l)
		{
			for(j=0;j<l;j++)
				data[0][j] = (float)kernel_function(real_i,j);
		}

		// reorder and copy: apply sign[i]*sign[j] and the current
		// permutation into one of the two rotating buffers
		float buf[] = buffer[next_buffer];
		next_buffer = 1 - next_buffer;
		byte si = sign[i];
		for(j=0;j<len;j++)
			buf[j] = (float) si * sign[j] * data[0][index[j]];
		return buf;
	}

	double[] get_QD()
	{
		return QD;
	}
}
  1155. public class svm {
  1156. //
  1157. // construct and solve various formulations
  1158. //
  1159. public static final int LIBSVM_VERSION=322;
  1160. public static final Random rand = new Random();
  1161. private static svm_print_interface svm_print_stdout = new svm_print_interface()
  1162. {
  1163. public void print(String s)
  1164. {
  1165. System.out.print(s);
  1166. System.out.flush();
  1167. }
  1168. };
  1169. private static svm_print_interface svm_print_string = svm_print_stdout;
  1170. static void info(String s)
  1171. {
  1172. svm_print_string.print(s);
  1173. }
  1174. private static void solve_c_svc(svm_problem prob, svm_parameter param,
  1175. double[] alpha, Solver.SolutionInfo si,
  1176. double Cp, double Cn)
  1177. {
  1178. int l = prob.l;
  1179. double[] minus_ones = new double[l];
  1180. byte[] y = new byte[l];
  1181. int i;
  1182. for(i=0;i<l;i++)
  1183. {
  1184. alpha[i] = 0;
  1185. minus_ones[i] = -1;
  1186. if(prob.y[i] > 0) y[i] = +1; else y[i] = -1;
  1187. }
  1188. Solver s = new Solver();
  1189. s.Solve(l, new SVC_Q(prob,param,y), minus_ones, y,
  1190. alpha, Cp, Cn, param.eps, si, param.shrinking);
  1191. double sum_alpha=0;
  1192. for(i=0;i<l;i++)
  1193. sum_alpha += alpha[i];
  1194. if (Cp==Cn)
  1195. svm.info("nu = "+sum_alpha/(Cp*prob.l)+"\n");
  1196. for(i=0;i<l;i++)
  1197. alpha[i] *= y[i];
  1198. }
  1199. private static void solve_nu_svc(svm_problem prob, svm_parameter param,
  1200. double[] alpha, Solver.SolutionInfo si)
  1201. {
  1202. int i;
  1203. int l = prob.l;
  1204. double nu = param.nu;
  1205. byte[] y = new byte[l];
  1206. for(i=0;i<l;i++)
  1207. if(prob.y[i]>0)
  1208. y[i] = +1;
  1209. else
  1210. y[i] = -1;
  1211. double sum_pos = nu*l/2;
  1212. double sum_neg = nu*l/2;
  1213. for(i=0;i<l;i++)
  1214. if(y[i] == +1)
  1215. {
  1216. alpha[i] = Math.min(1.0,sum_pos);
  1217. sum_pos -= alpha[i];
  1218. }
  1219. else
  1220. {
  1221. alpha[i] = Math.min(1.0,sum_neg);
  1222. sum_neg -= alpha[i];
  1223. }
  1224. double[] zeros = new double[l];
  1225. for(i=0;i<l;i++)
  1226. zeros[i] = 0;
  1227. Solver_NU s = new Solver_NU();
  1228. s.Solve(l, new SVC_Q(prob,param,y), zeros, y,
  1229. alpha, 1.0, 1.0, param.eps, si, param.shrinking);
  1230. double r = si.r;
  1231. svm.info("C = "+1/r+"\n");
  1232. for(i=0;i<l;i++)
  1233. alpha[i] *= y[i]/r;
  1234. si.rho /= r;
  1235. si.obj /= (r*r);
  1236. si.upper_bound_p = 1/r;
  1237. si.upper_bound_n = 1/r;
  1238. }
  1239. private static void solve_one_class(svm_problem prob, svm_parameter param,
  1240. double[] alpha, Solver.SolutionInfo si)
  1241. {
  1242. int l = prob.l;
  1243. double[] zeros = new double[l];
  1244. byte[] ones = new byte[l];
  1245. int i;
  1246. int n = (int)(param.nu*prob.l); // # of alpha's at upper bound
  1247. for(i=0;i<n;i++)
  1248. alpha[i] = 1;
  1249. if(n<prob.l)
  1250. alpha[n] = param.nu * prob.l - n;
  1251. for(i=n+1;i<l;i++)
  1252. alpha[i] = 0;
  1253. for(i=0;i<l;i++)
  1254. {
  1255. zeros[i] = 0;
  1256. ones[i] = 1;
  1257. }
  1258. Solver s = new Solver();
  1259. s.Solve(l, new ONE_CLASS_Q(prob,param), zeros, ones,
  1260. alpha, 1.0, 1.0, param.eps, si, param.shrinking);
  1261. }
  1262. private static void solve_epsilon_svr(svm_problem prob, svm_parameter param,
  1263. double[] alpha, Solver.SolutionInfo si)
  1264. {
  1265. int l = prob.l;
  1266. double[] alpha2 = new double[2*l];
  1267. double[] linear_term = new double[2*l];
  1268. byte[] y = new byte[2*l];
  1269. int i;
  1270. for(i=0;i<l;i++)
  1271. {
  1272. alpha2[i] = 0;
  1273. linear_term[i] = param.p - prob.y[i];
  1274. y[i] = 1;
  1275. alpha2[i+l] = 0;
  1276. linear_term[i+l] = param.p + prob.y[i];
  1277. y[i+l] = -1;
  1278. }
  1279. Solver s = new Solver();
  1280. s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
  1281. alpha2, param.C, param.C, param.eps, si, param.shrinking);
  1282. double sum_alpha = 0;
  1283. for(i=0;i<l;i++)
  1284. {
  1285. alpha[i] = alpha2[i] - alpha2[i+l];
  1286. sum_alpha += Math.abs(alpha[i]);
  1287. }
  1288. svm.info("nu = "+sum_alpha/(param.C*l)+"\n");
  1289. }
  1290. private static void solve_nu_svr(svm_problem prob, svm_parameter param,
  1291. double[] alpha, Solver.SolutionInfo si)
  1292. {
  1293. int l = prob.l;
  1294. double C = param.C;
  1295. double[] alpha2 = new double[2*l];
  1296. double[] linear_term = new double[2*l];
  1297. byte[] y = new byte[2*l];
  1298. int i;
  1299. double sum = C * param.nu * l / 2;
  1300. for(i=0;i<l;i++)
  1301. {
  1302. alpha2[i] = alpha2[i+l] = Math.min(sum,C);
  1303. sum -= alpha2[i];
  1304. linear_term[i] = - prob.y[i];
  1305. y[i] = 1;
  1306. linear_term[i+l] = prob.y[i];
  1307. y[i+l] = -1;
  1308. }
  1309. Solver_NU s = new Solver_NU();
  1310. s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
  1311. alpha2, C, C, param.eps, si, param.shrinking);
  1312. svm.info("epsilon = "+(-si.r)+"\n");
  1313. for(i=0;i<l;i++)
  1314. alpha[i] = alpha2[i] - alpha2[i+l];
  1315. }
  1316. //
  1317. // decision_function
  1318. //
	// Result of training one binary sub-problem: the signed support-vector
	// coefficients (aligned with the sub-problem's data) and the bias rho.
	static class decision_function
	{
		double[] alpha;	// signed coefficients, one per training point
		double rho;	// bias term of the decision function
	};
	// Train a single two-class (or one-class/regression) problem:
	// dispatch to the formulation-specific solver, log solver statistics,
	// count support vectors, and package alpha + rho.
	// Cp/Cn are the per-class penalties (used by C_SVC only).
	static decision_function svm_train_one(
		svm_problem prob, svm_parameter param,
		double Cp, double Cn)
	{
		double[] alpha = new double[prob.l];
		Solver.SolutionInfo si = new Solver.SolutionInfo();
		switch(param.svm_type)
		{
			case svm_parameter.C_SVC:
				solve_c_svc(prob,param,alpha,si,Cp,Cn);
				break;
			case svm_parameter.NU_SVC:
				solve_nu_svc(prob,param,alpha,si);
				break;
			case svm_parameter.ONE_CLASS:
				solve_one_class(prob,param,alpha,si);
				break;
			case svm_parameter.EPSILON_SVR:
				solve_epsilon_svr(prob,param,alpha,si);
				break;
			case svm_parameter.NU_SVR:
				solve_nu_svr(prob,param,alpha,si);
				break;
		}

		svm.info("obj = "+si.obj+", rho = "+si.rho+"\n");

		// output SVs: nSV = all support vectors, nBSV = those at a bound
		int nSV = 0;
		int nBSV = 0;
		for(int i=0;i<prob.l;i++)
		{
			if(Math.abs(alpha[i]) > 0)
			{
				++nSV;
				if(prob.y[i] > 0)
				{
					if(Math.abs(alpha[i]) >= si.upper_bound_p)
						++nBSV;
				}
				else
				{
					if(Math.abs(alpha[i]) >= si.upper_bound_n)
						++nBSV;
				}
			}
		}

		svm.info("nSV = "+nSV+", nBSV = "+nBSV+"\n");

		decision_function f = new decision_function();
		f.alpha = alpha;
		f.rho = si.rho;
		return f;
	}
	// Platt's binary SVM Probabilistic Output: an improvement from Lin et al.
	// Fits P(y=1|f) = 1/(1+exp(A*f+B)) to (dec_values, labels) by
	// minimizing the regularized cross-entropy with Newton's method plus
	// a backtracking line search. Writes the fitted A, B into probAB.
	private static void sigmoid_train(int l, double[] dec_values, double[] labels,
				  double[] probAB)
	{
		double A, B;
		double prior1=0, prior0 = 0;
		int i;

		for (i=0;i<l;i++)
			if (labels[i] > 0) prior1+=1;
			else prior0+=1;

		int max_iter=100;	// Maximal number of iterations
		double min_step=1e-10;	// Minimal step taken in line search
		double sigma=1e-12;	// For numerically strict PD of Hessian
		double eps=1e-5;
		// soft targets per Platt: avoids 0/1 targets overfitting
		double hiTarget=(prior1+1.0)/(prior1+2.0);
		double loTarget=1/(prior0+2.0);
		double[] t= new double[l];
		double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize;
		double newA,newB,newf,d1,d2;
		int iter;

		// Initial Point and Initial Fun Value
		A=0.0; B=Math.log((prior0+1.0)/(prior1+1.0));
		double fval = 0.0;

		for (i=0;i<l;i++)
		{
			if (labels[i]>0) t[i]=hiTarget;
			else t[i]=loTarget;
			fApB = dec_values[i]*A+B;
			// two algebraically equal forms, chosen to avoid exp overflow
			if (fApB>=0)
				fval += t[i]*fApB + Math.log(1+Math.exp(-fApB));
			else
				fval += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
		}
		for (iter=0;iter<max_iter;iter++)
		{
			// Update Gradient and Hessian (use H' = H + sigma I)
			h11=sigma; // numerically ensures strict PD
			h22=sigma;
			h21=0.0;g1=0.0;g2=0.0;
			for (i=0;i<l;i++)
			{
				fApB = dec_values[i]*A+B;
				if (fApB >= 0)
				{
					p=Math.exp(-fApB)/(1.0+Math.exp(-fApB));
					q=1.0/(1.0+Math.exp(-fApB));
				}
				else
				{
					p=1.0/(1.0+Math.exp(fApB));
					q=Math.exp(fApB)/(1.0+Math.exp(fApB));
				}
				d2=p*q;
				h11+=dec_values[i]*dec_values[i]*d2;
				h22+=d2;
				h21+=dec_values[i]*d2;
				d1=t[i]-p;
				g1+=dec_values[i]*d1;
				g2+=d1;
			}

			// Stopping Criteria
			if (Math.abs(g1)<eps && Math.abs(g2)<eps)
				break;

			// Finding Newton direction: -inv(H') * g
			det=h11*h22-h21*h21;
			dA=-(h22*g1 - h21 * g2) / det;
			dB=-(-h21*g1+ h11 * g2) / det;
			gd=g1*dA+g2*dB;

			stepsize = 1;		// Line Search
			while (stepsize >= min_step)
			{
				newA = A + stepsize * dA;
				newB = B + stepsize * dB;

				// New function value
				newf = 0.0;
				for (i=0;i<l;i++)
				{
					fApB = dec_values[i]*newA+newB;
					if (fApB >= 0)
						newf += t[i]*fApB + Math.log(1+Math.exp(-fApB));
					else
						newf += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
				}
				// Check sufficient decrease (Armijo-style condition)
				if (newf<fval+0.0001*stepsize*gd)
				{
					A=newA;B=newB;fval=newf;
					break;
				}
				else
					stepsize = stepsize / 2.0;
			}

			if (stepsize < min_step)
			{
				svm.info("Line search fails in two-class probability estimates\n");
				break;
			}
		}

		if (iter>=max_iter)
			svm.info("Reaching maximal iterations in two-class probability estimates\n");
		probAB[0]=A;probAB[1]=B;
	}
  1477. private static double sigmoid_predict(double decision_value, double A, double B)
  1478. {
  1479. double fApB = decision_value*A+B;
  1480. if (fApB >= 0)
  1481. return Math.exp(-fApB)/(1.0+Math.exp(-fApB));
  1482. else
  1483. return 1.0/(1+Math.exp(fApB)) ;
  1484. }
	// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng.
	// Couples the k*(k-1)/2 pairwise probabilities r[i][j] into class
	// probabilities p[] by iteratively solving the fixed-point system
	// Qp = pQp*e under sum(p)=1.
	private static void multiclass_probability(int k, double[][] r, double[] p)
	{
		int t,j;
		int iter = 0, max_iter=Math.max(100,k);
		double[][] Q=new double[k][k];
		double[] Qp=new double[k];
		double pQp, eps=0.005/k;

		// build Q from the pairwise estimates; start from uniform p
		for (t=0;t<k;t++)
		{
			p[t]=1.0/k;  // Valid if k = 1
			Q[t][t]=0;
			for (j=0;j<t;j++)
			{
				Q[t][t]+=r[j][t]*r[j][t];
				Q[t][j]=Q[j][t];	// symmetric: reuse the value below the diagonal
			}
			for (j=t+1;j<k;j++)
			{
				Q[t][t]+=r[j][t]*r[j][t];
				Q[t][j]=-r[j][t]*r[t][j];
			}
		}
		for (iter=0;iter<max_iter;iter++)
		{
			// stopping condition, recalculate QP,pQP for numerical accuracy
			pQp=0;
			for (t=0;t<k;t++)
			{
				Qp[t]=0;
				for (j=0;j<k;j++)
					Qp[t]+=Q[t][j]*p[j];
				pQp+=p[t]*Qp[t];
			}
			double max_error=0;
			for (t=0;t<k;t++)
			{
				double error=Math.abs(Qp[t]-pQp);
				if (error>max_error)
					max_error=error;
			}
			if (max_error<eps) break;

			// one sweep of coordinate updates, renormalizing after each
			for (t=0;t<k;t++)
			{
				double diff=(-Qp[t]+pQp)/Q[t][t];
				p[t]+=diff;
				// incremental update of pQp and Qp keeps the sweep O(k) per coordinate
				pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);
				for (j=0;j<k;j++)
				{
					Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);
					p[j]/=(1+diff);
				}
			}
		}
		if (iter>=max_iter)
			svm.info("Exceeds max_iter in multiclass_prob\n");
	}
	// Cross-validation decision values for probability estimates.
	// Runs 5-fold CV on the binary sub-problem, collects out-of-fold
	// decision values, and fits the Platt sigmoid (A,B) into probAB.
	private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
	{
		int i;
		int nr_fold = 5;
		int[] perm = new int[prob.l];
		double[] dec_values = new double[prob.l];

		// random shuffle
		for(i=0;i<prob.l;i++) perm[i]=i;
		for(i=0;i<prob.l;i++)
		{
			int j = i+rand.nextInt(prob.l-i);	// Fisher-Yates style swap
			do {int tmp=perm[i]; perm[i]=perm[j]; perm[j]=tmp;} while(false);
		}
		for(i=0;i<nr_fold;i++)
		{
			int begin = i*prob.l/nr_fold;
			int end = (i+1)*prob.l/nr_fold;
			int j,k;
			svm_problem subprob = new svm_problem();

			// training sub-problem = everything outside [begin,end)
			subprob.l = prob.l-(end-begin);
			subprob.x = new svm_node[subprob.l][];
			subprob.y = new double[subprob.l];

			k=0;
			for(j=0;j<begin;j++)
			{
				subprob.x[k] = prob.x[perm[j]];
				subprob.y[k] = prob.y[perm[j]];
				++k;
			}
			for(j=end;j<prob.l;j++)
			{
				subprob.x[k] = prob.x[perm[j]];
				subprob.y[k] = prob.y[perm[j]];
				++k;
			}
			int p_count=0,n_count=0;
			for(j=0;j<k;j++)
				if(subprob.y[j]>0)
					p_count++;
				else
					n_count++;

			// degenerate folds: no training data, or only one class present
			if(p_count==0 && n_count==0)
				for(j=begin;j<end;j++)
					dec_values[perm[j]] = 0;
			else if(p_count > 0 && n_count == 0)
				for(j=begin;j<end;j++)
					dec_values[perm[j]] = 1;
			else if(p_count == 0 && n_count > 0)
				for(j=begin;j<end;j++)
					dec_values[perm[j]] = -1;
			else
			{
				// regular fold: train with fixed C=1 and class weights Cp/Cn
				svm_parameter subparam = (svm_parameter)param.clone();
				subparam.probability=0;
				subparam.C=1.0;
				subparam.nr_weight=2;
				subparam.weight_label = new int[2];
				subparam.weight = new double[2];
				subparam.weight_label[0]=+1;
				subparam.weight_label[1]=-1;
				subparam.weight[0]=Cp;
				subparam.weight[1]=Cn;
				svm_model submodel = svm_train(subprob,subparam);
				for(j=begin;j<end;j++)
				{
					double[] dec_value=new double[1];
					svm_predict_values(submodel,prob.x[perm[j]],dec_value);
					dec_values[perm[j]]=dec_value[0];
					// ensure +1 -1 order; reason not using CV subroutine
					dec_values[perm[j]] *= submodel.label[0];
				}
			}
		}
		sigmoid_train(prob.l,dec_values,prob.y,probAB);
	}
  1618. // Return parameter of a Laplace distribution
  1619. private static double svm_svr_probability(svm_problem prob, svm_parameter param)
  1620. {
  1621. int i;
  1622. int nr_fold = 5;
  1623. double[] ymv = new double[prob.l];
  1624. double mae = 0;
  1625. svm_parameter newparam = (svm_parameter)param.clone();
  1626. newparam.probability = 0;
  1627. svm_cross_validation(prob,newparam,nr_fold,ymv);
  1628. for(i=0;i<prob.l;i++)
  1629. {
  1630. ymv[i]=prob.y[i]-ymv[i];
  1631. mae += Math.abs(ymv[i]);
  1632. }
  1633. mae /= prob.l;
  1634. double std=Math.sqrt(2*mae*mae);
  1635. int count=0;
  1636. mae=0;
  1637. for(i=0;i<prob.l;i++)
  1638. if (Math.abs(ymv[i]) > 5*std)
  1639. count=count+1;
  1640. else
  1641. mae+=Math.abs(ymv[i]);
  1642. mae /= (prob.l-count);
  1643. svm.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+mae+"\n");
  1644. return mae;
  1645. }
	// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
	// perm, length l, must be allocated before calling this subroutine.
	// Scans the labels, assigns each point a class index, and produces a
	// permutation that groups points of the same class contiguously.
	// Outputs are returned via the single-element "ret" arrays.
	private static void svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
	{
		int l = prob.l;
		int max_nr_class = 16;	// initial capacity; doubled on demand below
		int nr_class = 0;
		int[] label = new int[max_nr_class];
		int[] count = new int[max_nr_class];
		int[] data_label = new int[l];	// per-point class index
		int i;

		for(i=0;i<l;i++)
		{
			int this_label = (int)(prob.y[i]);
			int j;
			for(j=0;j<nr_class;j++)
			{
				if(this_label == label[j])
				{
					++count[j];
					break;
				}
			}
			data_label[i] = j;
			if(j == nr_class)
			{
				// previously unseen label; grow the arrays if full
				if(nr_class == max_nr_class)
				{
					max_nr_class *= 2;
					int[] new_data = new int[max_nr_class];
					System.arraycopy(label,0,new_data,0,label.length);
					label = new_data;

					new_data = new int[max_nr_class];
					System.arraycopy(count,0,new_data,0,count.length);
					count = new_data;
				}
				label[nr_class] = this_label;
				count[nr_class] = 1;
				++nr_class;
			}
		}

		//
		// Labels are ordered by their first occurrence in the training set.
		// However, for two-class sets with -1/+1 labels and -1 appears first,
		// we swap labels to ensure that internally the binary SVM has positive data corresponding to the +1 instances.
		//
		if (nr_class == 2 && label[0] == -1 && label[1] == +1)
		{
			do {int tmp=label[0]; label[0]=label[1]; label[1]=tmp;} while(false);
			do {int tmp=count[0]; count[0]=count[1]; count[1]=tmp;} while(false);
			for(i=0;i<l;i++)
			{
				if(data_label[i] == 0)
					data_label[i] = 1;
				else
					data_label[i] = 0;
			}
		}

		// fill perm via counting-sort placement; start[] is consumed...
		int[] start = new int[nr_class];
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+count[i-1];
		for(i=0;i<l;i++)
		{
			perm[start[data_label[i]]] = i;
			++start[data_label[i]];
		}
		// ...then rebuilt so callers see each class's begin offset
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+count[i-1];

		nr_class_ret[0] = nr_class;
		label_ret[0] = label;
		start_ret[0] = start;
		count_ret[0] = count;
	}
  1721. //
  1722. // Interface functions
  1723. //
	// Train a model for any svm_type. Regression/one-class problems train a
	// single decision function; classification trains one-vs-one binary
	// classifiers for every class pair and packs the shared support vectors
	// into the model's sv_coef layout.
	public static svm_model svm_train(svm_problem prob, svm_parameter param)
	{
		svm_model model = new svm_model();
		model.param = param;

		if(param.svm_type == svm_parameter.ONE_CLASS ||
		   param.svm_type == svm_parameter.EPSILON_SVR ||
		   param.svm_type == svm_parameter.NU_SVR)
		{
			// regression or one-class-svm: a single function, no labels
			model.nr_class = 2;
			model.label = null;
			model.nSV = null;
			model.probA = null; model.probB = null;
			model.sv_coef = new double[1][];

			if(param.probability == 1 &&
			   (param.svm_type == svm_parameter.EPSILON_SVR ||
			    param.svm_type == svm_parameter.NU_SVR))
			{
				// Laplace scale for SVR probability output
				model.probA = new double[1];
				model.probA[0] = svm_svr_probability(prob,param);
			}

			decision_function f = svm_train_one(prob,param,0,0);
			model.rho = new double[1];
			model.rho[0] = f.rho;

			// keep only the points with nonzero alpha as SVs
			int nSV = 0;
			int i;
			for(i=0;i<prob.l;i++)
				if(Math.abs(f.alpha[i]) > 0) ++nSV;
			model.l = nSV;
			model.SV = new svm_node[nSV][];
			model.sv_coef[0] = new double[nSV];
			model.sv_indices = new int[nSV];
			int j = 0;
			for(i=0;i<prob.l;i++)
				if(Math.abs(f.alpha[i]) > 0)
				{
					model.SV[j] = prob.x[i];
					model.sv_coef[0][j] = f.alpha[i];
					model.sv_indices[j] = i+1;	// 1-based indices into the training set
					++j;
				}
		}
		else
		{
			// classification
			int l = prob.l;
			int[] tmp_nr_class = new int[1];
			int[][] tmp_label = new int[1][];
			int[][] tmp_start = new int[1][];
			int[][] tmp_count = new int[1][];
			int[] perm = new int[l];

			// group training data of the same class
			svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
			int nr_class = tmp_nr_class[0];
			int[] label = tmp_label[0];
			int[] start = tmp_start[0];
			int[] count = tmp_count[0];

			if(nr_class == 1)
				svm.info("WARNING: training data in only one class. See README for details.\n");

			// x = training points reordered so each class is contiguous
			svm_node[][] x = new svm_node[l][];
			int i;
			for(i=0;i<l;i++)
				x[i] = prob.x[perm[i]];

			// calculate weighted C (per-class penalty from param.weight)
			double[] weighted_C = new double[nr_class];
			for(i=0;i<nr_class;i++)
				weighted_C[i] = param.C;
			for(i=0;i<param.nr_weight;i++)
			{
				int j;
				for(j=0;j<nr_class;j++)
					if(param.weight_label[i] == label[j])
						break;
				if(j == nr_class)
					System.err.print("WARNING: class label "+param.weight_label[i]+" specified in weight is not found\n");
				else
					weighted_C[j] *= param.weight[i];
			}

			// train k*(k-1)/2 models
			boolean[] nonzero = new boolean[l];	// marks points that are SVs in at least one pair
			for(i=0;i<l;i++)
				nonzero[i] = false;
			decision_function[] f = new decision_function[nr_class*(nr_class-1)/2];

			double[] probA=null,probB=null;
			if (param.probability == 1)
			{
				probA=new double[nr_class*(nr_class-1)/2];
				probB=new double[nr_class*(nr_class-1)/2];
			}

			int p = 0;	// pair index over (i,j), i<j
			for(i=0;i<nr_class;i++)
				for(int j=i+1;j<nr_class;j++)
				{
					// sub-problem: class i as +1, class j as -1
					svm_problem sub_prob = new svm_problem();
					int si = start[i], sj = start[j];
					int ci = count[i], cj = count[j];
					sub_prob.l = ci+cj;
					sub_prob.x = new svm_node[sub_prob.l][];
					sub_prob.y = new double[sub_prob.l];
					int k;
					for(k=0;k<ci;k++)
					{
						sub_prob.x[k] = x[si+k];
						sub_prob.y[k] = +1;
					}
					for(k=0;k<cj;k++)
					{
						sub_prob.x[ci+k] = x[sj+k];
						sub_prob.y[ci+k] = -1;
					}

					if(param.probability == 1)
					{
						double[] probAB=new double[2];
						svm_binary_svc_probability(sub_prob,param,weighted_C[i],weighted_C[j],probAB);
						probA[p]=probAB[0];
						probB[p]=probAB[1];
					}

					f[p] = svm_train_one(sub_prob,param,weighted_C[i],weighted_C[j]);
					for(k=0;k<ci;k++)
						if(!nonzero[si+k] && Math.abs(f[p].alpha[k]) > 0)
							nonzero[si+k] = true;
					for(k=0;k<cj;k++)
						if(!nonzero[sj+k] && Math.abs(f[p].alpha[ci+k]) > 0)
							nonzero[sj+k] = true;
					++p;
				}

			// build output
			model.nr_class = nr_class;

			model.label = new int[nr_class];
			for(i=0;i<nr_class;i++)
				model.label[i] = label[i];

			model.rho = new double[nr_class*(nr_class-1)/2];
			for(i=0;i<nr_class*(nr_class-1)/2;i++)
				model.rho[i] = f[i].rho;

			if(param.probability == 1)
			{
				model.probA = new double[nr_class*(nr_class-1)/2];
				model.probB = new double[nr_class*(nr_class-1)/2];
				for(i=0;i<nr_class*(nr_class-1)/2;i++)
				{
					model.probA[i] = probA[i];
					model.probB[i] = probB[i];
				}
			}
			else
			{
				model.probA=null;
				model.probB=null;
			}

			// count SVs per class and in total (a point counts once even
			// if it is an SV for several pairs)
			int nnz = 0;
			int[] nz_count = new int[nr_class];
			model.nSV = new int[nr_class];
			for(i=0;i<nr_class;i++)
			{
				int nSV = 0;
				for(int j=0;j<count[i];j++)
					if(nonzero[start[i]+j])
					{
						++nSV;
						++nnz;
					}
				model.nSV[i] = nSV;
				nz_count[i] = nSV;
			}

			svm.info("Total nSV = "+nnz+"\n");

			model.l = nnz;
			model.SV = new svm_node[nnz][];
			model.sv_indices = new int[nnz];
			p = 0;
			for(i=0;i<l;i++)
				if(nonzero[i])
				{
					model.SV[p] = x[i];
					model.sv_indices[p++] = perm[i] + 1;	// 1-based original index
				}

			// nz_start[i] = offset of class i's SVs within the packed SV list
			int[] nz_start = new int[nr_class];
			nz_start[0] = 0;
			for(i=1;i<nr_class;i++)
				nz_start[i] = nz_start[i-1]+nz_count[i-1];

			model.sv_coef = new double[nr_class-1][];
			for(i=0;i<nr_class-1;i++)
				model.sv_coef[i] = new double[nnz];

			p = 0;
			for(i=0;i<nr_class;i++)
				for(int j=i+1;j<nr_class;j++)
				{
					// classifier (i,j): coefficients with
					// i are in sv_coef[j-1][nz_start[i]...],
					// j are in sv_coef[i][nz_start[j]...]

					int si = start[i];
					int sj = start[j];
					int ci = count[i];
					int cj = count[j];

					int q = nz_start[i];
					int k;
					for(k=0;k<ci;k++)
						if(nonzero[si+k])
							model.sv_coef[j-1][q++] = f[p].alpha[k];
					q = nz_start[j];
					for(k=0;k<cj;k++)
						if(nonzero[sj+k])
							model.sv_coef[i][q++] = f[p].alpha[ci+k];
					++p;
				}
		}
		return model;
	}
// Stratified cross validation
/**
 * Runs nr_fold-fold cross validation on prob with the given parameters.
 * The value predicted for each instance while it sat in the held-out fold
 * is written to target[i], indexed by the instance's original position.
 *
 * For classification (C_SVC / NU_SVC) with nr_fold < l the split is
 * stratified: instances are grouped per class first so every fold gets
 * approximately the same class distribution as the whole problem.
 */
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
{
	int i;
	int[] fold_start = new int[nr_fold+1];
	int l = prob.l;
	int[] perm = new int[l];

	// stratified cv may not give leave-one-out rate
	// Each class to l folds -> some folds may have zero elements
	if((param.svm_type == svm_parameter.C_SVC ||
	    param.svm_type == svm_parameter.NU_SVC) && nr_fold < l)
	{
		// Group instances by class; perm maps grouped order -> original index.
		int[] tmp_nr_class = new int[1];
		int[][] tmp_label = new int[1][];
		int[][] tmp_start = new int[1][];
		int[][] tmp_count = new int[1][];

		svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);

		int nr_class = tmp_nr_class[0];
		int[] start = tmp_start[0];
		int[] count = tmp_count[0];

		// random shuffle and then data grouped by fold using the array perm
		int[] fold_count = new int[nr_fold];
		int c;
		int[] index = new int[l];
		for(i=0;i<l;i++)
			index[i]=perm[i];
		// Fisher-Yates shuffle inside each class block, so stratification holds.
		for (c=0; c<nr_class; c++)
			for(i=0;i<count[c];i++)
			{
				int j = i+rand.nextInt(count[c]-i);
				do {int tmp=index[start[c]+j]; index[start[c]+j]=index[start[c]+i]; index[start[c]+i]=tmp;} while(false);
			}
		// Fold i receives a floor-balanced share of every class.
		for(i=0;i<nr_fold;i++)
		{
			fold_count[i] = 0;
			for (c=0; c<nr_class;c++)
				fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
		}
		fold_start[0]=0;
		for (i=1;i<=nr_fold;i++)
			fold_start[i] = fold_start[i-1]+fold_count[i-1];
		// Scatter each class's shuffled slice into its fold; fold_start[i]
		// is used as a moving write cursor here ...
		for (c=0; c<nr_class;c++)
			for(i=0;i<nr_fold;i++)
			{
				int begin = start[c]+i*count[c]/nr_fold;
				int end = start[c]+(i+1)*count[c]/nr_fold;
				for(int j=begin;j<end;j++)
				{
					perm[fold_start[i]] = index[j];
					fold_start[i]++;
				}
			}
		// ... so it must be rebuilt before the folds are consumed below.
		fold_start[0]=0;
		for (i=1;i<=nr_fold;i++)
			fold_start[i] = fold_start[i-1]+fold_count[i-1];
	}
	else
	{
		// Unstratified case: plain Fisher-Yates shuffle of all instances.
		for(i=0;i<l;i++) perm[i]=i;
		for(i=0;i<l;i++)
		{
			int j = i+rand.nextInt(l-i);
			do {int tmp=perm[i]; perm[i]=perm[j]; perm[j]=tmp;} while(false);
		}
		for(i=0;i<=nr_fold;i++)
			fold_start[i]=i*l/nr_fold;
	}

	// For each fold: train on the complement, predict the fold.
	for(i=0;i<nr_fold;i++)
	{
		int begin = fold_start[i];
		int end = fold_start[i+1];
		int j,k;
		svm_problem subprob = new svm_problem();

		subprob.l = l-(end-begin);
		subprob.x = new svm_node[subprob.l][];
		subprob.y = new double[subprob.l];

		// Copy everything before and after the held-out range [begin, end).
		k=0;
		for(j=0;j<begin;j++)
		{
			subprob.x[k] = prob.x[perm[j]];
			subprob.y[k] = prob.y[perm[j]];
			++k;
		}
		for(j=end;j<l;j++)
		{
			subprob.x[k] = prob.x[perm[j]];
			subprob.y[k] = prob.y[perm[j]];
			++k;
		}
		svm_model submodel = svm_train(subprob,param);
		if(param.probability==1 &&
		   (param.svm_type == svm_parameter.C_SVC ||
		    param.svm_type == svm_parameter.NU_SVC))
		{
			// Probability output requested and supported by this svm_type:
			// report the most probable label.
			double[] prob_estimates= new double[svm_get_nr_class(submodel)];
			for(j=begin;j<end;j++)
				target[perm[j]] = svm_predict_probability(submodel,prob.x[perm[j]],prob_estimates);
		}
		else
			for(j=begin;j<end;j++)
				target[perm[j]] = svm_predict(submodel,prob.x[perm[j]]);
	}
}
/** Returns the SVM formulation type (one of the svm_parameter.*_SVC/*_SVR/ONE_CLASS constants) the model was trained with. */
public static int svm_get_svm_type(svm_model model)
{
	return model.param.svm_type;
}
/** Returns the number of classes stored in the model. */
public static int svm_get_nr_class(svm_model model)
{
	return model.nr_class;
}
  2042. public static void svm_get_labels(svm_model model, int[] label)
  2043. {
  2044. if (model.label != null)
  2045. for(int i=0;i<model.nr_class;i++)
  2046. label[i] = model.label[i];
  2047. }
  2048. public static void svm_get_sv_indices(svm_model model, int[] indices)
  2049. {
  2050. if (model.sv_indices != null)
  2051. for(int i=0;i<model.l;i++)
  2052. indices[i] = model.sv_indices[i];
  2053. }
/** Returns the total number of support vectors in the model. */
public static int svm_get_nr_sv(svm_model model)
{
	return model.l;
}
  2058. public static double svm_get_svr_probability(svm_model model)
  2059. {
  2060. if ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
  2061. model.probA!=null)
  2062. return model.probA[0];
  2063. else
  2064. {
  2065. System.err.print("Model doesn't contain information for SVR probability inference\n");
  2066. return 0;
  2067. }
  2068. }
/**
 * Computes the decision value(s) of x under the model and returns the
 * prediction.  For one-class and regression models dec_values[0] receives
 * the single decision value; for classification models dec_values holds
 * one value per class pair (i,j) with i&lt;j, in that order, and the label
 * winning the pairwise vote is returned.
 */
public static double svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
{
	int i;
	if(model.param.svm_type == svm_parameter.ONE_CLASS ||
	   model.param.svm_type == svm_parameter.EPSILON_SVR ||
	   model.param.svm_type == svm_parameter.NU_SVR)
	{
		// Single decision function: sum_i coef_i * K(x, SV_i) - rho.
		double[] sv_coef = model.sv_coef[0];
		double sum = 0;
		for(i=0;i<model.l;i++)
			sum += sv_coef[i] * Kernel.k_function(x,model.SV[i],model.param);
		sum -= model.rho[0];
		dec_values[0] = sum;

		// One-class: only the sign matters; SVR: return the raw value.
		if(model.param.svm_type == svm_parameter.ONE_CLASS)
			return (sum>0)?1:-1;
		else
			return sum;
	}
	else
	{
		int nr_class = model.nr_class;
		int l = model.l;

		// Kernel values against all support vectors are shared by every
		// pairwise classifier, so compute them once up front.
		double[] kvalue = new double[l];
		for(i=0;i<l;i++)
			kvalue[i] = Kernel.k_function(x,model.SV[i],model.param);

		// start[c] = offset of class c's support vectors within SV/kvalue.
		int[] start = new int[nr_class];
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+model.nSV[i-1];

		int[] vote = new int[nr_class];
		for(i=0;i<nr_class;i++)
			vote[i] = 0;

		int p=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				double sum = 0;
				int si = start[i];
				int sj = start[j];
				int ci = model.nSV[i];
				int cj = model.nSV[j];

				int k;
				// For the (i,j) classifier, coefficients of class i's SVs
				// live in sv_coef[j-1], those of class j's in sv_coef[i].
				double[] coef1 = model.sv_coef[j-1];
				double[] coef2 = model.sv_coef[i];
				for(k=0;k<ci;k++)
					sum += coef1[si+k] * kvalue[si+k];
				for(k=0;k<cj;k++)
					sum += coef2[sj+k] * kvalue[sj+k];
				sum -= model.rho[p];
				dec_values[p] = sum;

				// Positive decision value favours class i, otherwise class j.
				if(dec_values[p] > 0)
					++vote[i];
				else
					++vote[j];
				p++;
			}

		// Prediction = class with the most pairwise wins (ties resolve to
		// the lower label index).
		int vote_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(vote[i] > vote[vote_max_idx])
				vote_max_idx = i;

		return model.label[vote_max_idx];
	}
}
  2132. public static double svm_predict(svm_model model, svm_node[] x)
  2133. {
  2134. int nr_class = model.nr_class;
  2135. double[] dec_values;
  2136. if(model.param.svm_type == svm_parameter.ONE_CLASS ||
  2137. model.param.svm_type == svm_parameter.EPSILON_SVR ||
  2138. model.param.svm_type == svm_parameter.NU_SVR)
  2139. dec_values = new double[1];
  2140. else
  2141. dec_values = new double[nr_class*(nr_class-1)/2];
  2142. double pred_result = svm_predict_values(model, x, dec_values);
  2143. return pred_result;
  2144. }
/**
 * Predicts the label of x and fills prob_estimates[i] with the estimated
 * probability of model.label[i].  Requires a classification model trained
 * with probability information (probA/probB present); otherwise it falls
 * back to plain svm_predict and leaves prob_estimates untouched.
 */
public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
{
	if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
	    model.probA!=null && model.probB!=null)
	{
		int i;
		int nr_class = model.nr_class;
		double[] dec_values = new double[nr_class*(nr_class-1)/2];
		svm_predict_values(model, x, dec_values);

		// Turn each pairwise decision value into a probability via the
		// fitted sigmoid, clipped away from 0 and 1 for numerical stability.
		double min_prob=1e-7;
		double[][] pairwise_prob=new double[nr_class][nr_class];

		int k=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				pairwise_prob[i][j]=Math.min(Math.max(sigmoid_predict(dec_values[k],model.probA[k],model.probB[k]),min_prob),1-min_prob);
				pairwise_prob[j][i]=1-pairwise_prob[i][j];
				k++;
			}
		if (nr_class == 2)
		{
			// Two classes: the single pairwise probability is the answer.
			prob_estimates[0] = pairwise_prob[0][1];
			prob_estimates[1] = pairwise_prob[1][0];
		}
		else
			// Couple all pairwise probabilities into per-class estimates.
			multiclass_probability(nr_class,pairwise_prob,prob_estimates);

		// Return the label with the highest estimated probability.
		int prob_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(prob_estimates[i] > prob_estimates[prob_max_idx])
				prob_max_idx = i;
		return model.label[prob_max_idx];
	}
	else
		return svm_predict(model, x);
}
// Textual names for the svm_parameter svm_type constants, indexed by the
// constant's value; used when writing and parsing model files.
static final String svm_type_table[] =
{
	"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",
};
// Textual names for the svm_parameter kernel_type constants, indexed by the
// constant's value; used when writing and parsing model files.
static final String kernel_type_table[]=
{
	"linear","polynomial","rbf","sigmoid","precomputed"
};
  2188. public static void svm_save_model(String model_file_name, svm_model model) throws IOException
  2189. {
  2190. DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(model_file_name)));
  2191. svm_parameter param = model.param;
  2192. fp.writeBytes("svm_type "+svm_type_table[param.svm_type]+"\n");
  2193. fp.writeBytes("kernel_type "+kernel_type_table[param.kernel_type]+"\n");
  2194. if(param.kernel_type == svm_parameter.POLY)
  2195. fp.writeBytes("degree "+param.degree+"\n");
  2196. if(param.kernel_type == svm_parameter.POLY ||
  2197. param.kernel_type == svm_parameter.RBF ||
  2198. param.kernel_type == svm_parameter.SIGMOID)
  2199. fp.writeBytes("gamma "+param.gamma+"\n");
  2200. if(param.kernel_type == svm_parameter.POLY ||
  2201. param.kernel_type == svm_parameter.SIGMOID)
  2202. fp.writeBytes("coef0 "+param.coef0+"\n");
  2203. int nr_class = model.nr_class;
  2204. int l = model.l;
  2205. fp.writeBytes("nr_class "+nr_class+"\n");
  2206. fp.writeBytes("total_sv "+l+"\n");
  2207. {
  2208. fp.writeBytes("rho");
  2209. for(int i=0;i<nr_class*(nr_class-1)/2;i++)
  2210. fp.writeBytes(" "+model.rho[i]);
  2211. fp.writeBytes("\n");
  2212. }
  2213. if(model.label != null)
  2214. {
  2215. fp.writeBytes("label");
  2216. for(int i=0;i<nr_class;i++)
  2217. fp.writeBytes(" "+model.label[i]);
  2218. fp.writeBytes("\n");
  2219. }
  2220. if(model.probA != null) // regression has probA only
  2221. {
  2222. fp.writeBytes("probA");
  2223. for(int i=0;i<nr_class*(nr_class-1)/2;i++)
  2224. fp.writeBytes(" "+model.probA[i]);
  2225. fp.writeBytes("\n");
  2226. }
  2227. if(model.probB != null)
  2228. {
  2229. fp.writeBytes("probB");
  2230. for(int i=0;i<nr_class*(nr_class-1)/2;i++)
  2231. fp.writeBytes(" "+model.probB[i]);
  2232. fp.writeBytes("\n");
  2233. }
  2234. if(model.nSV != null)
  2235. {
  2236. fp.writeBytes("nr_sv");
  2237. for(int i=0;i<nr_class;i++)
  2238. fp.writeBytes(" "+model.nSV[i]);
  2239. fp.writeBytes("\n");
  2240. }
  2241. fp.writeBytes("SV\n");
  2242. double[][] sv_coef = model.sv_coef;
  2243. svm_node[][] SV = model.SV;
  2244. for(int i=0;i<l;i++)
  2245. {
  2246. for(int j=0;j<nr_class-1;j++)
  2247. fp.writeBytes(sv_coef[j][i]+" ");
  2248. svm_node[] p = SV[i];
  2249. if(param.kernel_type == svm_parameter.PRECOMPUTED)
  2250. fp.writeBytes("0:"+(int)(p[0].value));
  2251. else
  2252. for(int j=0;j<p.length;j++)
  2253. fp.writeBytes(p[j].index+":"+p[j].value+" ");
  2254. fp.writeBytes("\n");
  2255. }
  2256. fp.close();
  2257. }
  2258. private static double atof(String s)
  2259. {
  2260. return Double.valueOf(s).doubleValue();
  2261. }
/**
 * Parses s as an int (helper for reading integer model-file fields).
 * Throws NumberFormatException on malformed input, which the callers'
 * catch blocks treat as a read failure.
 */
private static int atoi(String s)
{
	return Integer.parseInt(s);
}
/**
 * Parses the header section of a model file into model.param and the
 * model's per-class arrays, stopping once the "SV" line is reached (the
 * support vectors themselves are read by the caller).  Returns false on
 * any unrecognized keyword or parse/IO failure.
 */
private static boolean read_model_header(BufferedReader fp, svm_model model)
{
	svm_parameter param = new svm_parameter();
	model.param = param;
	// parameters for training only won't be assigned, but arrays are assigned as NULL for safety
	param.nr_weight = 0;
	param.weight_label = null;
	param.weight = null;

	try
	{
		while(true)
		{
			String cmd = fp.readLine();
			// Everything after the first space is the keyword's argument.
			String arg = cmd.substring(cmd.indexOf(' ')+1);

			if(cmd.startsWith("svm_type"))
			{
				// Map the type name back to its table index (see svm_type_table).
				int i;
				for(i=0;i<svm_type_table.length;i++)
				{
					if(arg.indexOf(svm_type_table[i])!=-1)
					{
						param.svm_type=i;
						break;
					}
				}
				if(i == svm_type_table.length)
				{
					System.err.print("unknown svm type.\n");
					return false;
				}
			}
			else if(cmd.startsWith("kernel_type"))
			{
				// Same index-lookup scheme for the kernel name.
				int i;
				for(i=0;i<kernel_type_table.length;i++)
				{
					if(arg.indexOf(kernel_type_table[i])!=-1)
					{
						param.kernel_type=i;
						break;
					}
				}
				if(i == kernel_type_table.length)
				{
					System.err.print("unknown kernel function.\n");
					return false;
				}
			}
			else if(cmd.startsWith("degree"))
				param.degree = atoi(arg);
			else if(cmd.startsWith("gamma"))
				param.gamma = atof(arg);
			else if(cmd.startsWith("coef0"))
				param.coef0 = atof(arg);
			else if(cmd.startsWith("nr_class"))
				model.nr_class = atoi(arg);
			else if(cmd.startsWith("total_sv"))
				model.l = atoi(arg);
			else if(cmd.startsWith("rho"))
			{
				// One rho per pairwise classifier; nr_class must already be set.
				int n = model.nr_class * (model.nr_class-1)/2;
				model.rho = new double[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.rho[i] = atof(st.nextToken());
			}
			else if(cmd.startsWith("label"))
			{
				int n = model.nr_class;
				model.label = new int[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.label[i] = atoi(st.nextToken());
			}
			else if(cmd.startsWith("probA"))
			{
				int n = model.nr_class*(model.nr_class-1)/2;
				model.probA = new double[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.probA[i] = atof(st.nextToken());
			}
			else if(cmd.startsWith("probB"))
			{
				int n = model.nr_class*(model.nr_class-1)/2;
				model.probB = new double[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.probB[i] = atof(st.nextToken());
			}
			else if(cmd.startsWith("nr_sv"))
			{
				int n = model.nr_class;
				model.nSV = new int[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.nSV[i] = atoi(st.nextToken());
			}
			else if(cmd.startsWith("SV"))
			{
				// Header finished; the SV block follows.
				break;
			}
			else
			{
				System.err.print("unknown text in model file: ["+cmd+"]\n");
				return false;
			}
		}
	}
	catch(Exception e)
	{
		// Malformed numbers, premature EOF (readLine returning null), etc.
		// all land here; callers only need the boolean outcome.
		return false;
	}
	return true;
}
  2381. public static svm_model svm_load_model(String model_file_name) throws IOException
  2382. {
  2383. return svm_load_model(new BufferedReader(new FileReader(model_file_name)));
  2384. }
  2385. public static svm_model svm_load_model(BufferedReader fp) throws IOException
  2386. {
  2387. // read parameters
  2388. svm_model model = new svm_model();
  2389. model.rho = null;
  2390. model.probA = null;
  2391. model.probB = null;
  2392. model.label = null;
  2393. model.nSV = null;
  2394. if (read_model_header(fp, model) == false)
  2395. {
  2396. System.err.print("ERROR: failed to read model\n");
  2397. return null;
  2398. }
  2399. // read sv_coef and SV
  2400. int m = model.nr_class - 1;
  2401. int l = model.l;
  2402. model.sv_coef = new double[m][l];
  2403. model.SV = new svm_node[l][];
  2404. for(int i=0;i<l;i++)
  2405. {
  2406. String line = fp.readLine();
  2407. StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
  2408. for(int k=0;k<m;k++)
  2409. model.sv_coef[k][i] = atof(st.nextToken());
  2410. int n = st.countTokens()/2;
  2411. model.SV[i] = new svm_node[n];
  2412. for(int j=0;j<n;j++)
  2413. {
  2414. model.SV[i][j] = new svm_node();
  2415. model.SV[i][j].index = atoi(st.nextToken());
  2416. model.SV[i][j].value = atof(st.nextToken());
  2417. }
  2418. }
  2419. fp.close();
  2420. return model;
  2421. }
/**
 * Validates param against prob before training.  Returns null when the
 * parameters are usable, otherwise a short message describing the first
 * problem found, in the order of the checks below.
 */
public static String svm_check_parameter(svm_problem prob, svm_parameter param)
{
	// svm_type

	int svm_type = param.svm_type;
	if(svm_type != svm_parameter.C_SVC &&
	   svm_type != svm_parameter.NU_SVC &&
	   svm_type != svm_parameter.ONE_CLASS &&
	   svm_type != svm_parameter.EPSILON_SVR &&
	   svm_type != svm_parameter.NU_SVR)
		return "unknown svm type";

	// kernel_type, degree

	int kernel_type = param.kernel_type;
	if(kernel_type != svm_parameter.LINEAR &&
	   kernel_type != svm_parameter.POLY &&
	   kernel_type != svm_parameter.RBF &&
	   kernel_type != svm_parameter.SIGMOID &&
	   kernel_type != svm_parameter.PRECOMPUTED)
		return "unknown kernel type";

	if(param.gamma < 0)
		return "gamma < 0";

	if(param.degree < 0)
		return "degree of polynomial kernel < 0";

	// cache_size,eps,C,nu,p,shrinking

	if(param.cache_size <= 0)
		return "cache_size <= 0";

	if(param.eps <= 0)
		return "eps <= 0";

	// C only applies to the C-parameterized formulations.
	if(svm_type == svm_parameter.C_SVC ||
	   svm_type == svm_parameter.EPSILON_SVR ||
	   svm_type == svm_parameter.NU_SVR)
		if(param.C <= 0)
			return "C <= 0";

	// nu only applies to the nu-parameterized formulations.
	if(svm_type == svm_parameter.NU_SVC ||
	   svm_type == svm_parameter.ONE_CLASS ||
	   svm_type == svm_parameter.NU_SVR)
		if(param.nu <= 0 || param.nu > 1)
			return "nu <= 0 or nu > 1";

	if(svm_type == svm_parameter.EPSILON_SVR)
		if(param.p < 0)
			return "p < 0";

	if(param.shrinking != 0 &&
	   param.shrinking != 1)
		return "shrinking != 0 and shrinking != 1";

	if(param.probability != 0 &&
	   param.probability != 1)
		return "probability != 0 and probability != 1";

	if(param.probability == 1 &&
	   svm_type == svm_parameter.ONE_CLASS)
		return "one-class SVM probability output not supported yet";

	// check whether nu-svc is feasible

	if(svm_type == svm_parameter.NU_SVC)
	{
		int l = prob.l;
		int max_nr_class = 16;
		int nr_class = 0;
		int[] label = new int[max_nr_class];
		int[] count = new int[max_nr_class];

		int i;
		// Count the size of each distinct class, growing the label/count
		// arrays on demand (doubling strategy).
		for(i=0;i<l;i++)
		{
			int this_label = (int)prob.y[i];
			int j;
			for(j=0;j<nr_class;j++)
				if(this_label == label[j])
				{
					++count[j];
					break;
				}

			if(j == nr_class)
			{
				if(nr_class == max_nr_class)
				{
					max_nr_class *= 2;
					int[] new_data = new int[max_nr_class];
					System.arraycopy(label,0,new_data,0,label.length);
					label = new_data;

					new_data = new int[max_nr_class];
					System.arraycopy(count,0,new_data,0,count.length);
					count = new_data;
				}
				label[nr_class] = this_label;
				count[nr_class] = 1;
				++nr_class;
			}
		}

		// nu is infeasible for a class pair when nu*(n1+n2)/2 exceeds the
		// smaller of the two class sizes.
		for(i=0;i<nr_class;i++)
		{
			int n1 = count[i];
			for(int j=i+1;j<nr_class;j++)
			{
				int n2 = count[j];
				if(param.nu*(n1+n2)/2 > Math.min(n1,n2))
					return "specified nu is infeasible";
			}
		}
	}

	return null;
}
  2520. public static int svm_check_probability_model(svm_model model)
  2521. {
  2522. if (((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
  2523. model.probA!=null && model.probB!=null) ||
  2524. ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
  2525. model.probA!=null))
  2526. return 1;
  2527. else
  2528. return 0;
  2529. }
  2530. public static void svm_set_print_string_function(svm_print_interface print_func)
  2531. {
  2532. if (print_func == null)
  2533. svm_print_string = svm_print_stdout;
  2534. else
  2535. svm_print_string = print_func;
  2536. }
  2537. }

A Python package for graph kernels, graph edit distances and graph pre-image problem.