You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

svm.m4 63 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860
  dnl m4 macro definitions used to generate the Java libsvm sources
  dnl (float vs. double variants) from this single template.
  dnl swap(type, a, b): textual swap of two lvalues via a scoped temporary.
  1. define(`swap',`do {$1 tmp=$2; $2=$3; $3=tmp;} while(false)')
  dnl Qfloat: storage type for cached kernel values (float in this variant).
  2. define(`Qfloat',`float')
  dnl SIZE_OF_QFLOAT: sizeof(Qfloat) in bytes, used for cache accounting.
  3. define(`SIZE_OF_QFLOAT',4)
  dnl TAU: tiny positive constant substituted for non-positive quadratic
  dnl coefficients so divisions stay well-defined (Fan et al., JMLR 2005).
  4. define(`TAU',1e-12)
  dnl changecom: treat `//' to end-of-line as m4 comment text so Java
  dnl line comments pass through without macro expansion.
  5. changecom(`//',`')
  6. package libsvm;
  7. import java.io.*;
  8. import java.util.*;
  9. //
  10. // Kernel Cache
  11. //
  12. // l is the number of total data items
  13. // size is the cache size limit in bytes
  14. //
  15. class Cache {
  // number of total data items (columns that may be cached)
  16. private final int l;
  // remaining cache capacity, counted in Qfloat elements (not bytes)
  17. private long size;
  // node of the circular doubly-linked LRU list; one head_t per column
  18. private final class head_t
  19. {
  20. head_t prev, next; // a circular list
  21. Qfloat[] data;
  22. int len; // data[0,len) is cached in this entry
  23. }
  24. private final head_t[] head;
  // sentinel of the LRU list; lru_head.next is the least recently used entry
  25. private head_t lru_head;
  // l_: number of columns; size_: cache size limit in bytes
  26. Cache(int l_, long size_)
  27. {
  28. l = l_;
  29. size = size_;
  30. head = new head_t[l];
  31. for(int i=0;i<l;i++) head[i] = new head_t();
  // convert the byte budget into a Qfloat-element budget
  32. size /= SIZE_OF_QFLOAT;
  33. size -= l * (16/SIZE_OF_QFLOAT); // sizeof(head_t) == 16
  34. size = Math.max(size, 2* (long) l); // cache must be large enough for two columns
  35. lru_head = new head_t();
  36. lru_head.next = lru_head.prev = lru_head;
  37. }
  // unlink h from the LRU list (h's own prev/next are left dangling)
  38. private void lru_delete(head_t h)
  39. {
  40. // delete from current location
  41. h.prev.next = h.next;
  42. h.next.prev = h.prev;
  43. }
  // link h in just before the sentinel, i.e. as most recently used
  44. private void lru_insert(head_t h)
  45. {
  46. // insert to last position
  47. h.next = lru_head;
  48. h.prev = lru_head.prev;
  49. h.prev.next = h;
  50. h.next.prev = h;
  51. }
  52. // request data [0,len)
  53. // return some position p where [p,len) need to be filled
  54. // (p >= len if nothing needs to be filled)
  55. // java: simulate pointer using single-element array
  56. int get_data(int index, Qfloat[][] data, int len)
  57. {
  58. head_t h = head[index];
  // temporarily unlink; re-inserted as most recently used below
  59. if(h.len > 0) lru_delete(h);
  60. int more = len - h.len;
  61. if(more > 0)
  62. {
  63. // free old space
  // evict least-recently-used entries until 'more' elements fit
  64. while(size < more)
  65. {
  66. head_t old = lru_head.next;
  67. lru_delete(old);
  68. size += old.len;
  69. old.data = null;
  70. old.len = 0;
  71. }
  72. // allocate new space
  73. Qfloat[] new_data = new Qfloat[len];
  74. if(h.data != null) System.arraycopy(h.data,0,new_data,0,h.len);
  75. h.data = new_data;
  76. size -= more;
  // m4 swap macro: afterwards h.len holds the requested length and the
  // local 'len' holds the previously cached length, which is returned
  // as the position from which the caller must fill in values
  77. swap(int,h.len,len);
  78. }
  79. lru_insert(h);
  80. data[0] = h.data;
  81. return len;
  82. }
  // keep the cache consistent when the problem permutes indices i and j
  83. void swap_index(int i, int j)
  84. {
  85. if(i==j) return;
  86. if(head[i].len > 0) lru_delete(head[i]);
  87. if(head[j].len > 0) lru_delete(head[j]);
  88. swap(Qfloat[],head[i].data,head[j].data);
  89. swap(int,head[i].len,head[j].len);
  90. if(head[i].len > 0) lru_insert(head[i]);
  91. if(head[j].len > 0) lru_insert(head[j]);
  // ensure i < j for the per-column fix-up loop below
  92. if(i>j) swap(int,i,j);
  // swap row entries i and j inside every cached column
  93. for(head_t h = lru_head.next; h!=lru_head; h=h.next)
  94. {
  95. if(h.len > i)
  96. {
  97. if(h.len > j)
  98. swap(Qfloat,h.data[i],h.data[j]);
  99. else
  100. {
  101. // give up
  // column covers i but not j, so it cannot be fixed up; evict it
  102. lru_delete(h);
  103. size += h.len;
  104. h.data = null;
  105. h.len = 0;
  106. }
  107. }
  108. }
  109. }
  110. }
  111. //
  112. // Kernel evaluation
  113. //
  114. // the static method k_function is for doing single kernel evaluation
  115. // the constructor of Kernel prepares to calculate the l*l kernel matrix
  116. // the member function get_Q is for getting one column from the Q Matrix
  117. //
  118. abstract class QMatrix {
  // returns column 'column' of the Q matrix, valid on [0,len)
  119. abstract Qfloat[] get_Q(int column, int len);
  // returns the diagonal of Q
  120. abstract double[] get_QD();
  // permutes rows/columns i and j of Q (and any cached data)
  121. abstract void swap_index(int i, int j);
  122. };
  123. abstract class Kernel extends QMatrix {
  // training vectors in sparse (index,value) representation
  124. private svm_node[][] x;
  // precomputed dot(x[i],x[i]); only allocated for the RBF kernel
  125. private final double[] x_square;
  126. // svm_parameter
  127. private final int kernel_type;
  128. private final int degree;
  129. private final double gamma;
  130. private final double coef0;
  131. abstract Qfloat[] get_Q(int column, int len);
  132. abstract double[] get_QD();
  133. void swap_index(int i, int j)
  134. {
  135. swap(svm_node[],x[i],x[j]);
  136. if(x_square != null) swap(double,x_square[i],x_square[j]);
  137. }
  // base^times by binary exponentiation; returns 1.0 for times <= 0
  138. private static double powi(double base, int times)
  139. {
  140. double tmp = base, ret = 1.0;
  141. for(int t=times; t>0; t/=2)
  142. {
  143. if(t%2==1) ret*=tmp;
  144. tmp = tmp * tmp;
  145. }
  146. return ret;
  147. }
  // kernel value K(x[i],x[j]) for the configured kernel_type
  148. double kernel_function(int i, int j)
  149. {
  150. switch(kernel_type)
  151. {
  152. case svm_parameter.LINEAR:
  153. return dot(x[i],x[j]);
  154. case svm_parameter.POLY:
  155. return powi(gamma*dot(x[i],x[j])+coef0,degree);
  156. case svm_parameter.RBF:
  // uses |u-v|^2 = u.u + v.v - 2 u.v with the cached squares
  157. return Math.exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j])));
  158. case svm_parameter.SIGMOID:
  159. return Math.tanh(gamma*dot(x[i],x[j])+coef0);
  160. case svm_parameter.PRECOMPUTED:
  // x[j][0].value holds the column index into the precomputed matrix
  161. return x[i][(int)(x[j][0].value)].value;
  162. default:
  163. return 0; // java
  164. }
  165. }
  166. Kernel(int l, svm_node[][] x_, svm_parameter param)
  167. {
  168. this.kernel_type = param.kernel_type;
  169. this.degree = param.degree;
  170. this.gamma = param.gamma;
  171. this.coef0 = param.coef0;
  // shallow copy so swap_index does not reorder the caller's array
  172. x = (svm_node[][])x_.clone();
  173. if(kernel_type == svm_parameter.RBF)
  174. {
  175. x_square = new double[l];
  176. for(int i=0;i<l;i++)
  177. x_square[i] = dot(x[i],x[i]);
  178. }
  179. else x_square = null;
  180. }
  // sparse dot product: merge walk over the two index-sorted node lists
  181. static double dot(svm_node[] x, svm_node[] y)
  182. {
  183. double sum = 0;
  184. int xlen = x.length;
  185. int ylen = y.length;
  186. int i = 0;
  187. int j = 0;
  188. while(i < xlen && j < ylen)
  189. {
  190. if(x[i].index == y[j].index)
  191. sum += x[i++].value * y[j++].value;
  192. else
  193. {
  // indices differ: advance the side with the smaller index
  194. if(x[i].index > y[j].index)
  195. ++j;
  196. else
  197. ++i;
  198. }
  199. }
  200. return sum;
  201. }
  // single kernel evaluation on two raw vectors (used at predict time)
  202. static double k_function(svm_node[] x, svm_node[] y,
  203. svm_parameter param)
  204. {
  205. switch(param.kernel_type)
  206. {
  207. case svm_parameter.LINEAR:
  208. return dot(x,y);
  209. case svm_parameter.POLY:
  210. return powi(param.gamma*dot(x,y)+param.coef0,param.degree);
  211. case svm_parameter.RBF:
  212. {
  // accumulate |x-y|^2 over the merged sparse representations
  213. double sum = 0;
  214. int xlen = x.length;
  215. int ylen = y.length;
  216. int i = 0;
  217. int j = 0;
  218. while(i < xlen && j < ylen)
  219. {
  220. if(x[i].index == y[j].index)
  221. {
  222. double d = x[i++].value - y[j++].value;
  223. sum += d*d;
  224. }
  225. else if(x[i].index > y[j].index)
  226. {
  // index present only in y: contributes y_j^2
  227. sum += y[j].value * y[j].value;
  228. ++j;
  229. }
  230. else
  231. {
  // index present only in x: contributes x_i^2
  232. sum += x[i].value * x[i].value;
  233. ++i;
  234. }
  235. }
  // trailing entries of whichever vector is longer
  236. while(i < xlen)
  237. {
  238. sum += x[i].value * x[i].value;
  239. ++i;
  240. }
  241. while(j < ylen)
  242. {
  243. sum += y[j].value * y[j].value;
  244. ++j;
  245. }
  246. return Math.exp(-param.gamma*sum);
  247. }
  248. case svm_parameter.SIGMOID:
  249. return Math.tanh(param.gamma*dot(x,y)+param.coef0);
  250. case svm_parameter.PRECOMPUTED:
  251. return x[(int)(y[0].value)].value;
  252. default:
  253. return 0; // java
  254. }
  255. }
  256. }
  257. // An SMO algorithm in Fan et al., JMLR 6(2005), p. 1889--1918
  258. // Solves:
  259. //
  260. // min 0.5(\alpha^T Q \alpha) + p^T \alpha
  261. //
  262. // y^T \alpha = \delta
  263. // y_i = +1 or -1
  264. // 0 <= alpha_i <= Cp for y_i = 1
  265. // 0 <= alpha_i <= Cn for y_i = -1
  266. //
  267. // Given:
  268. //
  269. // Q, p, y, Cp, Cn, and an initial feasible point \alpha
  270. // l is the size of vectors and matrices
  271. // eps is the stopping tolerance
  272. //
  273. // solution will be put in \alpha, objective value will be put in obj
  274. //
  // SMO solver (see header comment above): maintains the gradient G of the
  // dual objective incrementally while optimizing pairs of variables.
  275. class Solver {
  // number of variables not shrunk out; they occupy indices [0,active_size)
  276. int active_size;
  // labels, y[i] in {+1,-1}
  277. byte[] y;
  278. double[] G; // gradient of objective function
  279. static final byte LOWER_BOUND = 0;
  280. static final byte UPPER_BOUND = 1;
  281. static final byte FREE = 2;
  282. byte[] alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE
  283. double[] alpha;
  284. QMatrix Q;
  // diagonal of Q, fetched once up front
  285. double[] QD;
  // stopping tolerance
  286. double eps;
  // upper bounds for positive / negative examples
  287. double Cp,Cn;
  // linear term of the objective
  288. double[] p;
  // maps current (possibly permuted) index -> original index
  289. int[] active_set;
  290. double[] G_bar; // gradient, if we treat free variables as 0
  291. int l;
  292. boolean unshrink; // XXX
  293. static final double INF = java.lang.Double.POSITIVE_INFINITY;
  // per-example upper bound, chosen by label
  294. double get_C(int i)
  295. {
  296. return (y[i] > 0)? Cp : Cn;
  297. }
  // reclassify alpha[i] as at-bound or free after it changes
  298. void update_alpha_status(int i)
  299. {
  300. if(alpha[i] >= get_C(i))
  301. alpha_status[i] = UPPER_BOUND;
  302. else if(alpha[i] <= 0)
  303. alpha_status[i] = LOWER_BOUND;
  304. else alpha_status[i] = FREE;
  305. }
  306. boolean is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; }
  307. boolean is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; }
  308. boolean is_free(int i) { return alpha_status[i] == FREE; }
  309. // java: information about solution except alpha,
  310. // because we cannot return multiple values otherwise...
  311. static class SolutionInfo {
  312. double obj;
  313. double rho;
  314. double upper_bound_p;
  315. double upper_bound_n;
  316. double r; // for Solver_NU
  317. }
  // swap every per-variable array entry for i and j (used by shrinking)
  318. void swap_index(int i, int j)
  319. {
  320. Q.swap_index(i,j);
  321. swap(byte, y[i],y[j]);
  322. swap(double, G[i],G[j]);
  323. swap(byte, alpha_status[i],alpha_status[j]);
  324. swap(double, alpha[i],alpha[j]);
  325. swap(double, p[i],p[j]);
  326. swap(int, active_set[i],active_set[j]);
  327. swap(double, G_bar[i],G_bar[j]);
  328. }
  329. void reconstruct_gradient()
  330. {
  331. // reconstruct inactive elements of G from G_bar and free variables
  332. if(active_size == l) return;
  333. int i,j;
  334. int nr_free = 0;
  // start from the bound-variable contribution, then add free variables
  335. for(j=active_size;j<l;j++)
  336. G[j] = G_bar[j] + p[j];
  337. for(j=0;j<active_size;j++)
  338. if(is_free(j))
  339. nr_free++;
  340. if(2*nr_free < active_size)
  341. svm.info("\nWARNING: using -h 0 may be faster\n");
  // choose the loop order that touches fewer kernel entries
  342. if (nr_free*l > 2*active_size*(l-active_size))
  343. {
  344. for(i=active_size;i<l;i++)
  345. {
  346. Qfloat[] Q_i = Q.get_Q(i,active_size);
  347. for(j=0;j<active_size;j++)
  348. if(is_free(j))
  349. G[i] += alpha[j] * Q_i[j];
  350. }
  351. }
  352. else
  353. {
  354. for(i=0;i<active_size;i++)
  355. if(is_free(i))
  356. {
  357. Qfloat[] Q_i = Q.get_Q(i,l);
  358. double alpha_i = alpha[i];
  359. for(j=active_size;j<l;j++)
  360. G[j] += alpha_i * Q_i[j];
  361. }
  362. }
  363. }
  // main entry: solves the dual problem; results go into alpha_ and si
  364. void Solve(int l, QMatrix Q, double[] p_, byte[] y_,
  365. double[] alpha_, double Cp, double Cn, double eps, SolutionInfo si, int shrinking)
  366. {
  367. this.l = l;
  368. this.Q = Q;
  369. QD = Q.get_QD();
  370. p = (double[])p_.clone();
  371. y = (byte[])y_.clone();
  372. alpha = (double[])alpha_.clone();
  373. this.Cp = Cp;
  374. this.Cn = Cn;
  375. this.eps = eps;
  376. this.unshrink = false;
  377. // initialize alpha_status
  378. {
  379. alpha_status = new byte[l];
  380. for(int i=0;i<l;i++)
  381. update_alpha_status(i);
  382. }
  383. // initialize active set (for shrinking)
  384. {
  385. active_set = new int[l];
  386. for(int i=0;i<l;i++)
  387. active_set[i] = i;
  388. active_size = l;
  389. }
  390. // initialize gradient
  391. {
  392. G = new double[l];
  393. G_bar = new double[l];
  394. int i;
  395. for(i=0;i<l;i++)
  396. {
  397. G[i] = p[i];
  398. G_bar[i] = 0;
  399. }
  // fold in contributions of any nonzero initial alphas
  400. for(i=0;i<l;i++)
  401. if(!is_lower_bound(i))
  402. {
  403. Qfloat[] Q_i = Q.get_Q(i,l);
  404. double alpha_i = alpha[i];
  405. int j;
  406. for(j=0;j<l;j++)
  407. G[j] += alpha_i*Q_i[j];
  408. if(is_upper_bound(i))
  409. for(j=0;j<l;j++)
  410. G_bar[j] += get_C(i) * Q_i[j];
  411. }
  412. }
  413. // optimization step
  414. int iter = 0;
  // guard against int overflow when computing 100*l
  415. int max_iter = Math.max(10000000, l>Integer.MAX_VALUE/100 ? Integer.MAX_VALUE : 100*l);
  416. int counter = Math.min(l,1000)+1;
  417. int[] working_set = new int[2];
  418. while(iter < max_iter)
  419. {
  420. // show progress and do shrinking
  421. if(--counter == 0)
  422. {
  423. counter = Math.min(l,1000);
  424. if(shrinking!=0) do_shrinking();
  425. svm.info(".");
  426. }
  427. if(select_working_set(working_set)!=0)
  428. {
  429. // reconstruct the whole gradient
  430. reconstruct_gradient();
  431. // reset active set size and check
  432. active_size = l;
  433. svm.info("*");
  // optimal over the full set: done; otherwise keep going unshrunk
  434. if(select_working_set(working_set)!=0)
  435. break;
  436. else
  437. counter = 1; // do shrinking next iteration
  438. }
  439. int i = working_set[0];
  440. int j = working_set[1];
  441. ++iter;
  442. // update alpha[i] and alpha[j], handle bounds carefully
  443. Qfloat[] Q_i = Q.get_Q(i,active_size);
  444. Qfloat[] Q_j = Q.get_Q(j,active_size);
  445. double C_i = get_C(i);
  446. double C_j = get_C(j);
  447. double old_alpha_i = alpha[i];
  448. double old_alpha_j = alpha[j];
  // analytic two-variable subproblem; the two label cases differ in sign
  449. if(y[i]!=y[j])
  450. {
  451. double quad_coef = QD[i]+QD[j]+2*Q_i[j];
  452. if (quad_coef <= 0)
  453. quad_coef = TAU;
  454. double delta = (-G[i]-G[j])/quad_coef;
  455. double diff = alpha[i] - alpha[j];
  456. alpha[i] += delta;
  457. alpha[j] += delta;
  // clip back into the feasible box, preserving alpha_i - alpha_j
  458. if(diff > 0)
  459. {
  460. if(alpha[j] < 0)
  461. {
  462. alpha[j] = 0;
  463. alpha[i] = diff;
  464. }
  465. }
  466. else
  467. {
  468. if(alpha[i] < 0)
  469. {
  470. alpha[i] = 0;
  471. alpha[j] = -diff;
  472. }
  473. }
  474. if(diff > C_i - C_j)
  475. {
  476. if(alpha[i] > C_i)
  477. {
  478. alpha[i] = C_i;
  479. alpha[j] = C_i - diff;
  480. }
  481. }
  482. else
  483. {
  484. if(alpha[j] > C_j)
  485. {
  486. alpha[j] = C_j;
  487. alpha[i] = C_j + diff;
  488. }
  489. }
  490. }
  491. else
  492. {
  493. double quad_coef = QD[i]+QD[j]-2*Q_i[j];
  494. if (quad_coef <= 0)
  495. quad_coef = TAU;
  496. double delta = (G[i]-G[j])/quad_coef;
  497. double sum = alpha[i] + alpha[j];
  498. alpha[i] -= delta;
  499. alpha[j] += delta;
  // clip back into the feasible box, preserving alpha_i + alpha_j
  500. if(sum > C_i)
  501. {
  502. if(alpha[i] > C_i)
  503. {
  504. alpha[i] = C_i;
  505. alpha[j] = sum - C_i;
  506. }
  507. }
  508. else
  509. {
  510. if(alpha[j] < 0)
  511. {
  512. alpha[j] = 0;
  513. alpha[i] = sum;
  514. }
  515. }
  516. if(sum > C_j)
  517. {
  518. if(alpha[j] > C_j)
  519. {
  520. alpha[j] = C_j;
  521. alpha[i] = sum - C_j;
  522. }
  523. }
  524. else
  525. {
  526. if(alpha[i] < 0)
  527. {
  528. alpha[i] = 0;
  529. alpha[j] = sum;
  530. }
  531. }
  532. }
  533. // update G
  534. double delta_alpha_i = alpha[i] - old_alpha_i;
  535. double delta_alpha_j = alpha[j] - old_alpha_j;
  536. for(int k=0;k<active_size;k++)
  537. {
  538. G[k] += Q_i[k]*delta_alpha_i + Q_j[k]*delta_alpha_j;
  539. }
  540. // update alpha_status and G_bar
  541. {
  542. boolean ui = is_upper_bound(i);
  543. boolean uj = is_upper_bound(j);
  544. update_alpha_status(i);
  545. update_alpha_status(j);
  546. int k;
  // only recompute G_bar when a variable crosses the upper bound
  547. if(ui != is_upper_bound(i))
  548. {
  549. Q_i = Q.get_Q(i,l);
  550. if(ui)
  551. for(k=0;k<l;k++)
  552. G_bar[k] -= C_i * Q_i[k];
  553. else
  554. for(k=0;k<l;k++)
  555. G_bar[k] += C_i * Q_i[k];
  556. }
  557. if(uj != is_upper_bound(j))
  558. {
  559. Q_j = Q.get_Q(j,l);
  560. if(uj)
  561. for(k=0;k<l;k++)
  562. G_bar[k] -= C_j * Q_j[k];
  563. else
  564. for(k=0;k<l;k++)
  565. G_bar[k] += C_j * Q_j[k];
  566. }
  567. }
  568. }
  569. if(iter >= max_iter)
  570. {
  571. if(active_size < l)
  572. {
  573. // reconstruct the whole gradient to calculate objective value
  574. reconstruct_gradient();
  575. active_size = l;
  576. svm.info("*");
  577. }
  578. System.err.print("\nWARNING: reaching max number of iterations\n");
  579. }
  580. // calculate rho
  581. si.rho = calculate_rho();
  582. // calculate objective value
  583. {
  584. double v = 0;
  585. int i;
  586. for(i=0;i<l;i++)
  587. v += alpha[i] * (G[i] + p[i]);
  588. si.obj = v/2;
  589. }
  590. // put back the solution
  // undo the shrinking permutation via active_set
  591. {
  592. for(int i=0;i<l;i++)
  593. alpha_[active_set[i]] = alpha[i];
  594. }
  595. si.upper_bound_p = Cp;
  596. si.upper_bound_n = Cn;
  597. svm.info("\noptimization finished, #iter = "+iter+"\n");
  598. }
  599. // return 1 if already optimal, return 0 otherwise
  600. int select_working_set(int[] working_set)
  601. {
  602. // return i,j such that
  603. // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
  604. // j: mimimizes the decrease of obj value
  605. // (if quadratic coefficeint <= 0, replace it with tau)
  606. // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
  607. double Gmax = -INF;
  608. double Gmax2 = -INF;
  609. int Gmax_idx = -1;
  610. int Gmin_idx = -1;
  611. double obj_diff_min = INF;
  // first pass: pick i with maximal violation among I_up
  612. for(int t=0;t<active_size;t++)
  613. if(y[t]==+1)
  614. {
  615. if(!is_upper_bound(t))
  616. if(-G[t] >= Gmax)
  617. {
  618. Gmax = -G[t];
  619. Gmax_idx = t;
  620. }
  621. }
  622. else
  623. {
  624. if(!is_lower_bound(t))
  625. if(G[t] >= Gmax)
  626. {
  627. Gmax = G[t];
  628. Gmax_idx = t;
  629. }
  630. }
  631. int i = Gmax_idx;
  632. Qfloat[] Q_i = null;
  633. if(i != -1) // null Q_i not accessed: Gmax=-INF if i=-1
  634. Q_i = Q.get_Q(i,active_size);
  // second pass: pick j in I_low minimizing the second-order estimate
  // of the objective decrease for the pair (i,j)
  635. for(int j=0;j<active_size;j++)
  636. {
  637. if(y[j]==+1)
  638. {
  639. if (!is_lower_bound(j))
  640. {
  641. double grad_diff=Gmax+G[j];
  642. if (G[j] >= Gmax2)
  643. Gmax2 = G[j];
  644. if (grad_diff > 0)
  645. {
  646. double obj_diff;
  647. double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j];
  648. if (quad_coef > 0)
  649. obj_diff = -(grad_diff*grad_diff)/quad_coef;
  650. else
  651. obj_diff = -(grad_diff*grad_diff)/TAU;
  652. if (obj_diff <= obj_diff_min)
  653. {
  654. Gmin_idx=j;
  655. obj_diff_min = obj_diff;
  656. }
  657. }
  658. }
  659. }
  660. else
  661. {
  662. if (!is_upper_bound(j))
  663. {
  664. double grad_diff= Gmax-G[j];
  665. if (-G[j] >= Gmax2)
  666. Gmax2 = -G[j];
  667. if (grad_diff > 0)
  668. {
  669. double obj_diff;
  670. double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j];
  671. if (quad_coef > 0)
  672. obj_diff = -(grad_diff*grad_diff)/quad_coef;
  673. else
  674. obj_diff = -(grad_diff*grad_diff)/TAU;
  675. if (obj_diff <= obj_diff_min)
  676. {
  677. Gmin_idx=j;
  678. obj_diff_min = obj_diff;
  679. }
  680. }
  681. }
  682. }
  683. }
  // optimal when the maximal violating pair gap falls below eps
  684. if(Gmax+Gmax2 < eps || Gmin_idx == -1)
  685. return 1;
  686. working_set[0] = Gmax_idx;
  687. working_set[1] = Gmin_idx;
  688. return 0;
  689. }
  // true if variable i can be shrunk given the current violation bounds
  690. private boolean be_shrunk(int i, double Gmax1, double Gmax2)
  691. {
  692. if(is_upper_bound(i))
  693. {
  694. if(y[i]==+1)
  695. return(-G[i] > Gmax1);
  696. else
  697. return(-G[i] > Gmax2);
  698. }
  699. else if(is_lower_bound(i))
  700. {
  701. if(y[i]==+1)
  702. return(G[i] > Gmax2);
  703. else
  704. return(G[i] > Gmax1);
  705. }
  706. else
  707. return(false);
  708. }
  // move shrinkable variables to the tail, reducing active_size
  709. void do_shrinking()
  710. {
  711. int i;
  712. double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) }
  713. double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) }
  714. // find maximal violating pair first
  715. for(i=0;i<active_size;i++)
  716. {
  717. if(y[i]==+1)
  718. {
  719. if(!is_upper_bound(i))
  720. {
  721. if(-G[i] >= Gmax1)
  722. Gmax1 = -G[i];
  723. }
  724. if(!is_lower_bound(i))
  725. {
  726. if(G[i] >= Gmax2)
  727. Gmax2 = G[i];
  728. }
  729. }
  730. else
  731. {
  732. if(!is_upper_bound(i))
  733. {
  734. if(-G[i] >= Gmax2)
  735. Gmax2 = -G[i];
  736. }
  737. if(!is_lower_bound(i))
  738. {
  739. if(G[i] >= Gmax1)
  740. Gmax1 = G[i];
  741. }
  742. }
  743. }
  // near convergence: unshrink everything once and re-check all variables
  744. if(unshrink == false && Gmax1 + Gmax2 <= eps*10)
  745. {
  746. unshrink = true;
  747. reconstruct_gradient();
  748. active_size = l;
  749. }
  750. for(i=0;i<active_size;i++)
  751. if (be_shrunk(i, Gmax1, Gmax2))
  752. {
  753. active_size--;
  // find a non-shrinkable tail element to swap into position i
  754. while (active_size > i)
  755. {
  756. if (!be_shrunk(active_size, Gmax1, Gmax2))
  757. {
  758. swap_index(i,active_size);
  759. break;
  760. }
  761. active_size--;
  762. }
  763. }
  764. }
  // bias term: average of y*G over free variables, or midpoint of bounds
  765. double calculate_rho()
  766. {
  767. double r;
  768. int nr_free = 0;
  769. double ub = INF, lb = -INF, sum_free = 0;
  770. for(int i=0;i<active_size;i++)
  771. {
  772. double yG = y[i]*G[i];
  773. if(is_lower_bound(i))
  774. {
  775. if(y[i] > 0)
  776. ub = Math.min(ub,yG);
  777. else
  778. lb = Math.max(lb,yG);
  779. }
  780. else if(is_upper_bound(i))
  781. {
  782. if(y[i] < 0)
  783. ub = Math.min(ub,yG);
  784. else
  785. lb = Math.max(lb,yG);
  786. }
  787. else
  788. {
  789. ++nr_free;
  790. sum_free += yG;
  791. }
  792. }
  793. if(nr_free>0)
  794. r = sum_free/nr_free;
  795. else
  796. r = (ub+lb)/2;
  797. return r;
  798. }
  799. }
//
// Solver for nu-svm classification and regression
//
// additional constraint: e^T \alpha = constant
//
// Because of the extra equality constraint, working-set selection and
// shrinking must pair indices within the same label class (y=+1 with
// y=+1, y=-1 with y=-1), and two separate thresholds r1/r2 are computed.
//
final class Solver_NU extends Solver
{
    // Stored so calculate_rho() can also write the extra scalar si.r.
    private SolutionInfo si;

    void Solve(int l, QMatrix Q, double[] p, byte[] y,
           double[] alpha, double Cp, double Cn, double eps,
           SolutionInfo si, int shrinking)
    {
        this.si = si;
        super.Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking);
    }

    // return 1 if already optimal, return 0 otherwise
    int select_working_set(int[] working_set)
    {
        // return i,j such that y_i = y_j and
        // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
        // j: minimizes the decrease of obj value
        //    (if quadratic coefficeint <= 0, replace it with tau)
        //    -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)

        double Gmaxp = -INF;        // best violation in the y=+1 class, I_up
        double Gmaxp2 = -INF;       // second max for the +1 stopping check
        int Gmaxp_idx = -1;

        double Gmaxn = -INF;        // best violation in the y=-1 class, I_up
        double Gmaxn2 = -INF;       // second max for the -1 stopping check
        int Gmaxn_idx = -1;

        int Gmin_idx = -1;
        double obj_diff_min = INF;

        // First index per class: maximal violator within I_up.
        for(int t=0;t<active_size;t++)
            if(y[t]==+1)
            {
                if(!is_upper_bound(t))
                    if(-G[t] >= Gmaxp)
                    {
                        Gmaxp = -G[t];
                        Gmaxp_idx = t;
                    }
            }
            else
            {
                if(!is_lower_bound(t))
                    if(G[t] >= Gmaxn)
                    {
                        Gmaxn = G[t];
                        Gmaxn_idx = t;
                    }
            }

        int ip = Gmaxp_idx;
        int in = Gmaxn_idx;
        Qfloat[] Q_ip = null;
        Qfloat[] Q_in = null;
        if(ip != -1) // null Q_ip not accessed: Gmaxp=-INF if ip=-1
            Q_ip = Q.get_Q(ip,active_size);
        if(in != -1)
            Q_in = Q.get_Q(in,active_size);

        // Second index: within the SAME class as the first index,
        // pick j in I_low giving the largest objective decrease.
        for(int j=0;j<active_size;j++)
        {
            if(y[j]==+1)
            {
                if (!is_lower_bound(j))
                {
                    double grad_diff=Gmaxp+G[j];
                    if (G[j] >= Gmaxp2)
                        Gmaxp2 = G[j];
                    if (grad_diff > 0)
                    {
                        double obj_diff;
                        double quad_coef = QD[ip]+QD[j]-2*Q_ip[j];
                        if (quad_coef > 0)
                            obj_diff = -(grad_diff*grad_diff)/quad_coef;
                        else
                            // non-PD 2x2 sub-problem: fall back to TAU
                            obj_diff = -(grad_diff*grad_diff)/TAU;

                        if (obj_diff <= obj_diff_min)
                        {
                            Gmin_idx=j;
                            obj_diff_min = obj_diff;
                        }
                    }
                }
            }
            else
            {
                if (!is_upper_bound(j))
                {
                    double grad_diff=Gmaxn-G[j];
                    if (-G[j] >= Gmaxn2)
                        Gmaxn2 = -G[j];
                    if (grad_diff > 0)
                    {
                        double obj_diff;
                        double quad_coef = QD[in]+QD[j]-2*Q_in[j];
                        if (quad_coef > 0)
                            obj_diff = -(grad_diff*grad_diff)/quad_coef;
                        else
                            obj_diff = -(grad_diff*grad_diff)/TAU;

                        if (obj_diff <= obj_diff_min)
                        {
                            Gmin_idx=j;
                            obj_diff_min = obj_diff;
                        }
                    }
                }
            }
        }

        // Optimal when both per-class violation gaps fall below eps.
        if(Math.max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps || Gmin_idx == -1)
            return 1;

        if(y[Gmin_idx] == +1)
            working_set[0] = Gmaxp_idx;
        else
            working_set[0] = Gmaxn_idx;
        working_set[1] = Gmin_idx;

        return 0;
    }

    // Decide whether index i can be shrunk out of the active set, using
    // the four per-class violation bounds computed in do_shrinking().
    private boolean be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4)
    {
        if(is_upper_bound(i))
        {
            if(y[i]==+1)
                return(-G[i] > Gmax1);
            else
                return(-G[i] > Gmax4);
        }
        else if(is_lower_bound(i))
        {
            if(y[i]==+1)
                return(G[i] > Gmax2);
            else
                return(G[i] > Gmax3);
        }
        else
            return(false);
    }

    // Shrink the active set; reconstructs the full gradient once the
    // per-class gaps approach the tolerance (unshrinking).
    void do_shrinking()
    {
        double Gmax1 = -INF;    // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) }
        double Gmax2 = -INF;    // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) }
        double Gmax3 = -INF;    // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) }
        double Gmax4 = -INF;    // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) }

        // find maximal violating pair first
        int i;
        for(i=0;i<active_size;i++)
        {
            if(!is_upper_bound(i))
            {
                if(y[i]==+1)
                {
                    if(-G[i] > Gmax1) Gmax1 = -G[i];
                }
                else if(-G[i] > Gmax4) Gmax4 = -G[i];
            }
            if(!is_lower_bound(i))
            {
                if(y[i]==+1)
                {
                    if(G[i] > Gmax2) Gmax2 = G[i];
                }
                else if(G[i] > Gmax3) Gmax3 = G[i];
            }
        }

        // Close to convergence: restore all variables and recheck.
        if(unshrink == false && Math.max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10)
        {
            unshrink = true;
            reconstruct_gradient();
            active_size = l;
        }

        // Compact the active set by swapping shrinkable entries to the end.
        for(i=0;i<active_size;i++)
            if (be_shrunk(i, Gmax1, Gmax2, Gmax3, Gmax4))
            {
                active_size--;
                while (active_size > i)
                {
                    if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4))
                    {
                        swap_index(i,active_size);
                        break;
                    }
                    active_size--;
                }
            }
    }

    // Compute the two per-class thresholds; stores their average in si.r
    // and returns half their difference (used as rho by the caller).
    double calculate_rho()
    {
        int nr_free1 = 0,nr_free2 = 0;
        double ub1 = INF, ub2 = INF;
        double lb1 = -INF, lb2 = -INF;
        double sum_free1 = 0, sum_free2 = 0;

        for(int i=0;i<active_size;i++)
        {
            if(y[i]==+1)
            {
                if(is_lower_bound(i))
                    ub1 = Math.min(ub1,G[i]);
                else if(is_upper_bound(i))
                    lb1 = Math.max(lb1,G[i]);
                else
                {
                    ++nr_free1;
                    sum_free1 += G[i];
                }
            }
            else
            {
                if(is_lower_bound(i))
                    ub2 = Math.min(ub2,G[i]);
                else if(is_upper_bound(i))
                    lb2 = Math.max(lb2,G[i]);
                else
                {
                    ++nr_free2;
                    sum_free2 += G[i];
                }
            }
        }

        double r1,r2;
        if(nr_free1 > 0)
            r1 = sum_free1/nr_free1;
        else
            r1 = (ub1+lb1)/2;

        if(nr_free2 > 0)
            r2 = sum_free2/nr_free2;
        else
            r2 = (ub2+lb2)/2;

        si.r = (r1+r2)/2;
        return (r1-r2)/2;
    }
}
//
// Q matrices for various formulations
//
// Q matrix for C-SVC / nu-SVC: Q[i][j] = y_i * y_j * K(x_i, x_j).
class SVC_Q extends Kernel
{
    private final byte[] y;        // copied labels (+1/-1); swaps must not touch caller data
    private final Cache cache;     // LRU cache of kernel columns (param.cache_size MB)
    private final double[] QD;     // precomputed diagonal K(i,i)

    SVC_Q(svm_problem prob, svm_parameter param, byte[] y_)
    {
        super(prob.l, prob.x, param);
        y = (byte[])y_.clone();
        cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
        QD = new double[prob.l];
        for(int i=0;i<prob.l;i++)
            QD[i] = kernel_function(i,i);
    }

    // Return the first len entries of column i, computing (and caching)
    // only the part the cache does not already hold.
    Qfloat[] get_Q(int i, int len)
    {
        Qfloat[][] data = new Qfloat[1][];
        int start, j;
        if((start = cache.get_data(i,data,len)) < len)
        {
            for(j=start;j<len;j++)
                data[0][j] = (Qfloat)(y[i]*y[j]*kernel_function(i,j));
        }
        return data[0];
    }

    double[] get_QD()
    {
        return QD;
    }

    // Keep cache, kernel state, labels and diagonal consistent when the
    // solver permutes indices. swap(...) is an m4 macro, not a Java call.
    void swap_index(int i, int j)
    {
        cache.swap_index(i,j);
        super.swap_index(i,j);
        swap(byte,y[i],y[j]);
        swap(double,QD[i],QD[j]);
    }
}
// Q matrix for one-class SVM: Q[i][j] = K(x_i, x_j) (no labels involved).
class ONE_CLASS_Q extends Kernel
{
    private final Cache cache;     // LRU cache of kernel columns
    private final double[] QD;     // precomputed diagonal K(i,i)

    ONE_CLASS_Q(svm_problem prob, svm_parameter param)
    {
        super(prob.l, prob.x, param);
        cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
        QD = new double[prob.l];
        for(int i=0;i<prob.l;i++)
            QD[i] = kernel_function(i,i);
    }

    // Return the first len entries of column i, filling only the
    // portion the cache does not already hold.
    Qfloat[] get_Q(int i, int len)
    {
        Qfloat[][] data = new Qfloat[1][];
        int start, j;
        if((start = cache.get_data(i,data,len)) < len)
        {
            for(j=start;j<len;j++)
                data[0][j] = (Qfloat)kernel_function(i,j);
        }
        return data[0];
    }

    double[] get_QD()
    {
        return QD;
    }

    // swap(...) is an m4 macro, not a Java method call.
    void swap_index(int i, int j)
    {
        cache.swap_index(i,j);
        super.swap_index(i,j);
        swap(double,QD[i],QD[j]);
    }
}
// Q matrix for SVR: the 2l x 2l expanded problem built from an l x l
// kernel. Entry (i,j) = sign[i]*sign[j]*K(index[i],index[j]); only the
// l x l kernel columns are cached, and get_Q reorders them on the fly.
class SVR_Q extends Kernel
{
    private final int l;             // original problem size (expanded problem is 2l)
    private final Cache cache;       // caches columns of the l x l kernel matrix
    private final byte[] sign;       // +1 for the first half, -1 for the mirrored half
    private final int[] index;       // expanded index -> original data index
    private int next_buffer;         // ping-pong selector between the two buffers
    private Qfloat[][] buffer;       // two reorder buffers so two live columns can coexist
    private final double[] QD;       // diagonal entries (duplicated across halves)

    SVR_Q(svm_problem prob, svm_parameter param)
    {
        super(prob.l, prob.x, param);
        l = prob.l;
        cache = new Cache(l,(long)(param.cache_size*(1<<20)));
        QD = new double[2*l];
        sign = new byte[2*l];
        index = new int[2*l];
        for(int k=0;k<l;k++)
        {
            sign[k] = 1;
            sign[k+l] = -1;
            index[k] = k;
            index[k+l] = k;
            QD[k] = kernel_function(k,k);
            QD[k+l] = QD[k];
        }
        buffer = new Qfloat[2][2*l];
        next_buffer = 0;
    }

    // Only the bookkeeping arrays are permuted; the kernel cache stays in
    // original order (index[] provides the indirection).
    // swap(...) is an m4 macro, not a Java method call.
    void swap_index(int i, int j)
    {
        swap(byte,sign[i],sign[j]);
        swap(int,index[i],index[j]);
        swap(double,QD[i],QD[j]);
    }

    Qfloat[] get_Q(int i, int len)
    {
        Qfloat[][] data = new Qfloat[1][];
        int j, real_i = index[i];
        if(cache.get_data(real_i,data,l) < l)
        {
            for(j=0;j<l;j++)
                data[0][j] = (Qfloat)kernel_function(real_i,j);
        }

        // reorder and copy
        Qfloat buf[] = buffer[next_buffer];
        next_buffer = 1 - next_buffer;
        byte si = sign[i];
        for(j=0;j<len;j++)
            buf[j] = (Qfloat) si * sign[j] * data[0][index[j]];
        return buf;
    }

    double[] get_QD()
    {
        return QD;
    }
}
  1160. public class svm {
//
// construct and solve various formulations
//
public static final int LIBSVM_VERSION=322;
public static final Random rand = new Random();

// Default print target: stdout, flushed after every message.
private static svm_print_interface svm_print_stdout = new svm_print_interface()
{
    public void print(String s)
    {
        System.out.print(s);
        System.out.flush();
    }
};

// Active print hook; defaults to stdout (reassignable elsewhere in this class).
private static svm_print_interface svm_print_string = svm_print_stdout;

// Route all informational output through the active print hook.
static void info(String s)
{
    svm_print_string.print(s);
}
  1179. private static void solve_c_svc(svm_problem prob, svm_parameter param,
  1180. double[] alpha, Solver.SolutionInfo si,
  1181. double Cp, double Cn)
  1182. {
  1183. int l = prob.l;
  1184. double[] minus_ones = new double[l];
  1185. byte[] y = new byte[l];
  1186. int i;
  1187. for(i=0;i<l;i++)
  1188. {
  1189. alpha[i] = 0;
  1190. minus_ones[i] = -1;
  1191. if(prob.y[i] > 0) y[i] = +1; else y[i] = -1;
  1192. }
  1193. Solver s = new Solver();
  1194. s.Solve(l, new SVC_Q(prob,param,y), minus_ones, y,
  1195. alpha, Cp, Cn, param.eps, si, param.shrinking);
  1196. double sum_alpha=0;
  1197. for(i=0;i<l;i++)
  1198. sum_alpha += alpha[i];
  1199. if (Cp==Cn)
  1200. svm.info("nu = "+sum_alpha/(Cp*prob.l)+"\n");
  1201. for(i=0;i<l;i++)
  1202. alpha[i] *= y[i];
  1203. }
  1204. private static void solve_nu_svc(svm_problem prob, svm_parameter param,
  1205. double[] alpha, Solver.SolutionInfo si)
  1206. {
  1207. int i;
  1208. int l = prob.l;
  1209. double nu = param.nu;
  1210. byte[] y = new byte[l];
  1211. for(i=0;i<l;i++)
  1212. if(prob.y[i]>0)
  1213. y[i] = +1;
  1214. else
  1215. y[i] = -1;
  1216. double sum_pos = nu*l/2;
  1217. double sum_neg = nu*l/2;
  1218. for(i=0;i<l;i++)
  1219. if(y[i] == +1)
  1220. {
  1221. alpha[i] = Math.min(1.0,sum_pos);
  1222. sum_pos -= alpha[i];
  1223. }
  1224. else
  1225. {
  1226. alpha[i] = Math.min(1.0,sum_neg);
  1227. sum_neg -= alpha[i];
  1228. }
  1229. double[] zeros = new double[l];
  1230. for(i=0;i<l;i++)
  1231. zeros[i] = 0;
  1232. Solver_NU s = new Solver_NU();
  1233. s.Solve(l, new SVC_Q(prob,param,y), zeros, y,
  1234. alpha, 1.0, 1.0, param.eps, si, param.shrinking);
  1235. double r = si.r;
  1236. svm.info("C = "+1/r+"\n");
  1237. for(i=0;i<l;i++)
  1238. alpha[i] *= y[i]/r;
  1239. si.rho /= r;
  1240. si.obj /= (r*r);
  1241. si.upper_bound_p = 1/r;
  1242. si.upper_bound_n = 1/r;
  1243. }
  1244. private static void solve_one_class(svm_problem prob, svm_parameter param,
  1245. double[] alpha, Solver.SolutionInfo si)
  1246. {
  1247. int l = prob.l;
  1248. double[] zeros = new double[l];
  1249. byte[] ones = new byte[l];
  1250. int i;
  1251. int n = (int)(param.nu*prob.l); // # of alpha's at upper bound
  1252. for(i=0;i<n;i++)
  1253. alpha[i] = 1;
  1254. if(n<prob.l)
  1255. alpha[n] = param.nu * prob.l - n;
  1256. for(i=n+1;i<l;i++)
  1257. alpha[i] = 0;
  1258. for(i=0;i<l;i++)
  1259. {
  1260. zeros[i] = 0;
  1261. ones[i] = 1;
  1262. }
  1263. Solver s = new Solver();
  1264. s.Solve(l, new ONE_CLASS_Q(prob,param), zeros, ones,
  1265. alpha, 1.0, 1.0, param.eps, si, param.shrinking);
  1266. }
  1267. private static void solve_epsilon_svr(svm_problem prob, svm_parameter param,
  1268. double[] alpha, Solver.SolutionInfo si)
  1269. {
  1270. int l = prob.l;
  1271. double[] alpha2 = new double[2*l];
  1272. double[] linear_term = new double[2*l];
  1273. byte[] y = new byte[2*l];
  1274. int i;
  1275. for(i=0;i<l;i++)
  1276. {
  1277. alpha2[i] = 0;
  1278. linear_term[i] = param.p - prob.y[i];
  1279. y[i] = 1;
  1280. alpha2[i+l] = 0;
  1281. linear_term[i+l] = param.p + prob.y[i];
  1282. y[i+l] = -1;
  1283. }
  1284. Solver s = new Solver();
  1285. s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
  1286. alpha2, param.C, param.C, param.eps, si, param.shrinking);
  1287. double sum_alpha = 0;
  1288. for(i=0;i<l;i++)
  1289. {
  1290. alpha[i] = alpha2[i] - alpha2[i+l];
  1291. sum_alpha += Math.abs(alpha[i]);
  1292. }
  1293. svm.info("nu = "+sum_alpha/(param.C*l)+"\n");
  1294. }
  1295. private static void solve_nu_svr(svm_problem prob, svm_parameter param,
  1296. double[] alpha, Solver.SolutionInfo si)
  1297. {
  1298. int l = prob.l;
  1299. double C = param.C;
  1300. double[] alpha2 = new double[2*l];
  1301. double[] linear_term = new double[2*l];
  1302. byte[] y = new byte[2*l];
  1303. int i;
  1304. double sum = C * param.nu * l / 2;
  1305. for(i=0;i<l;i++)
  1306. {
  1307. alpha2[i] = alpha2[i+l] = Math.min(sum,C);
  1308. sum -= alpha2[i];
  1309. linear_term[i] = - prob.y[i];
  1310. y[i] = 1;
  1311. linear_term[i+l] = prob.y[i];
  1312. y[i+l] = -1;
  1313. }
  1314. Solver_NU s = new Solver_NU();
  1315. s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
  1316. alpha2, C, C, param.eps, si, param.shrinking);
  1317. svm.info("epsilon = "+(-si.r)+"\n");
  1318. for(i=0;i<l;i++)
  1319. alpha[i] = alpha2[i] - alpha2[i+l];
  1320. }
//
// decision_function
//
// Result of training one binary sub-problem.
static class decision_function
{
    double[] alpha;   // signed coefficients, one per training point
    double rho;       // decision threshold
};
  1329. static decision_function svm_train_one(
  1330. svm_problem prob, svm_parameter param,
  1331. double Cp, double Cn)
  1332. {
  1333. double[] alpha = new double[prob.l];
  1334. Solver.SolutionInfo si = new Solver.SolutionInfo();
  1335. switch(param.svm_type)
  1336. {
  1337. case svm_parameter.C_SVC:
  1338. solve_c_svc(prob,param,alpha,si,Cp,Cn);
  1339. break;
  1340. case svm_parameter.NU_SVC:
  1341. solve_nu_svc(prob,param,alpha,si);
  1342. break;
  1343. case svm_parameter.ONE_CLASS:
  1344. solve_one_class(prob,param,alpha,si);
  1345. break;
  1346. case svm_parameter.EPSILON_SVR:
  1347. solve_epsilon_svr(prob,param,alpha,si);
  1348. break;
  1349. case svm_parameter.NU_SVR:
  1350. solve_nu_svr(prob,param,alpha,si);
  1351. break;
  1352. }
  1353. svm.info("obj = "+si.obj+", rho = "+si.rho+"\n");
  1354. // output SVs
  1355. int nSV = 0;
  1356. int nBSV = 0;
  1357. for(int i=0;i<prob.l;i++)
  1358. {
  1359. if(Math.abs(alpha[i]) > 0)
  1360. {
  1361. ++nSV;
  1362. if(prob.y[i] > 0)
  1363. {
  1364. if(Math.abs(alpha[i]) >= si.upper_bound_p)
  1365. ++nBSV;
  1366. }
  1367. else
  1368. {
  1369. if(Math.abs(alpha[i]) >= si.upper_bound_n)
  1370. ++nBSV;
  1371. }
  1372. }
  1373. }
  1374. svm.info("nSV = "+nSV+", nBSV = "+nBSV+"\n");
  1375. decision_function f = new decision_function();
  1376. f.alpha = alpha;
  1377. f.rho = si.rho;
  1378. return f;
  1379. }
// Platt's binary SVM Probabilistic Output: an improvement from Lin et al.
//
// Fits P(y=1|f) = 1/(1+exp(A*f+B)) to the decision values by regularized
// maximum likelihood, using Newton's method with backtracking line search.
// The fitted (A,B) are returned in probAB[0], probAB[1].
private static void sigmoid_train(int l, double[] dec_values, double[] labels,
                  double[] probAB)
{
    double A, B;
    double prior1=0, prior0 = 0;
    int i;

    // Class counts determine the regularized targets below.
    for (i=0;i<l;i++)
        if (labels[i] > 0) prior1+=1;
        else prior0+=1;

    int max_iter=100;          // Maximal number of iterations
    double min_step=1e-10;     // Minimal step taken in line search
    double sigma=1e-12;        // For numerically strict PD of Hessian
    double eps=1e-5;
    // Targets are smoothed away from 0/1 (Platt's Bayesian correction).
    double hiTarget=(prior1+1.0)/(prior1+2.0);
    double loTarget=1/(prior0+2.0);
    double[] t= new double[l];
    double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize;
    double newA,newB,newf,d1,d2;
    int iter;

    // Initial Point and Initial Fun Value
    A=0.0; B=Math.log((prior0+1.0)/(prior1+1.0));
    double fval = 0.0;

    for (i=0;i<l;i++)
    {
        if (labels[i]>0) t[i]=hiTarget;
        else t[i]=loTarget;
        fApB = dec_values[i]*A+B;
        // Two branches keep log(1+exp(.)) numerically stable for both signs.
        if (fApB>=0)
            fval += t[i]*fApB + Math.log(1+Math.exp(-fApB));
        else
            fval += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
    }

    for (iter=0;iter<max_iter;iter++)
    {
        // Update Gradient and Hessian (use H' = H + sigma I)
        h11=sigma; // numerically ensures strict PD
        h22=sigma;
        h21=0.0;g1=0.0;g2=0.0;
        for (i=0;i<l;i++)
        {
            fApB = dec_values[i]*A+B;
            if (fApB >= 0)
            {
                p=Math.exp(-fApB)/(1.0+Math.exp(-fApB));
                q=1.0/(1.0+Math.exp(-fApB));
            }
            else
            {
                p=1.0/(1.0+Math.exp(fApB));
                q=Math.exp(fApB)/(1.0+Math.exp(fApB));
            }
            d2=p*q;
            h11+=dec_values[i]*dec_values[i]*d2;
            h22+=d2;
            h21+=dec_values[i]*d2;
            d1=t[i]-p;
            g1+=dec_values[i]*d1;
            g2+=d1;
        }

        // Stopping Criteria
        if (Math.abs(g1)<eps && Math.abs(g2)<eps)
            break;

        // Finding Newton direction: -inv(H') * g
        det=h11*h22-h21*h21;
        dA=-(h22*g1 - h21 * g2) / det;
        dB=-(-h21*g1+ h11 * g2) / det;
        gd=g1*dA+g2*dB;

        stepsize = 1;       // Line Search
        while (stepsize >= min_step)
        {
            newA = A + stepsize * dA;
            newB = B + stepsize * dB;

            // New function value
            newf = 0.0;
            for (i=0;i<l;i++)
            {
                fApB = dec_values[i]*newA+newB;
                if (fApB >= 0)
                    newf += t[i]*fApB + Math.log(1+Math.exp(-fApB));
                else
                    newf += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
            }
            // Check sufficient decrease (Armijo-style condition)
            if (newf<fval+0.0001*stepsize*gd)
            {
                A=newA;B=newB;fval=newf;
                break;
            }
            else
                stepsize = stepsize / 2.0;
        }

        if (stepsize < min_step)
        {
            svm.info("Line search fails in two-class probability estimates\n");
            break;
        }
    }

    if (iter>=max_iter)
        svm.info("Reaching maximal iterations in two-class probability estimates\n");
    probAB[0]=A;probAB[1]=B;
}
  1482. private static double sigmoid_predict(double decision_value, double A, double B)
  1483. {
  1484. double fApB = decision_value*A+B;
  1485. if (fApB >= 0)
  1486. return Math.exp(-fApB)/(1.0+Math.exp(-fApB));
  1487. else
  1488. return 1.0/(1+Math.exp(fApB)) ;
  1489. }
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
//
// Combines the k*(k-1)/2 pairwise probabilities r[i][j] into a single
// k-class probability vector p by iteratively solving a fixed-point
// system; p is updated in place until the residual drops below eps.
private static void multiclass_probability(int k, double[][] r, double[] p)
{
    int t,j;
    int iter = 0, max_iter=Math.max(100,k);
    double[][] Q=new double[k][k];
    double[] Qp=new double[k];
    double pQp, eps=0.005/k;

    // Build the symmetric system matrix Q from the pairwise estimates.
    for (t=0;t<k;t++)
    {
        p[t]=1.0/k;  // Valid if k = 1
        Q[t][t]=0;
        for (j=0;j<t;j++)
        {
            Q[t][t]+=r[j][t]*r[j][t];
            Q[t][j]=Q[j][t];
        }
        for (j=t+1;j<k;j++)
        {
            Q[t][t]+=r[j][t]*r[j][t];
            Q[t][j]=-r[j][t]*r[t][j];
        }
    }
    for (iter=0;iter<max_iter;iter++)
    {
        // stopping condition, recalculate QP,pQP for numerical accuracy
        pQp=0;
        for (t=0;t<k;t++)
        {
            Qp[t]=0;
            for (j=0;j<k;j++)
                Qp[t]+=Q[t][j]*p[j];
            pQp+=p[t]*Qp[t];
        }
        double max_error=0;
        for (t=0;t<k;t++)
        {
            double error=Math.abs(Qp[t]-pQp);
            if (error>max_error)
                max_error=error;
        }
        if (max_error<eps) break;

        // One coordinate-wise update per class, renormalizing p each time.
        for (t=0;t<k;t++)
        {
            double diff=(-Qp[t]+pQp)/Q[t][t];
            p[t]+=diff;
            pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);
            for (j=0;j<k;j++)
            {
                Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);
                p[j]/=(1+diff);
            }
        }
    }
    if (iter>=max_iter)
        svm.info("Exceeds max_iter in multiclass_prob\n");
}
// Cross-validation decision values for probability estimates
//
// Runs an internal 5-fold CV, collects decision values on the held-out
// folds, then fits the Platt sigmoid (A,B) over them via sigmoid_train.
private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
{
    int i;
    int nr_fold = 5;
    int[] perm = new int[prob.l];
    double[] dec_values = new double[prob.l];

    // random shuffle (Fisher-Yates); swap(...) is an m4 macro
    for(i=0;i<prob.l;i++) perm[i]=i;
    for(i=0;i<prob.l;i++)
    {
        int j = i+rand.nextInt(prob.l-i);
        swap(int,perm[i],perm[j]);
    }
    for(i=0;i<nr_fold;i++)
    {
        int begin = i*prob.l/nr_fold;
        int end = (i+1)*prob.l/nr_fold;
        int j,k;
        svm_problem subprob = new svm_problem();

        // Training subproblem: everything outside [begin,end).
        subprob.l = prob.l-(end-begin);
        subprob.x = new svm_node[subprob.l][];
        subprob.y = new double[subprob.l];

        k=0;
        for(j=0;j<begin;j++)
        {
            subprob.x[k] = prob.x[perm[j]];
            subprob.y[k] = prob.y[perm[j]];
            ++k;
        }
        for(j=end;j<prob.l;j++)
        {
            subprob.x[k] = prob.x[perm[j]];
            subprob.y[k] = prob.y[perm[j]];
            ++k;
        }

        // Degenerate folds (single-class training data) get constant
        // decision values instead of a trained model.
        int p_count=0,n_count=0;
        for(j=0;j<k;j++)
            if(subprob.y[j]>0)
                p_count++;
            else
                n_count++;

        if(p_count==0 && n_count==0)
            for(j=begin;j<end;j++)
                dec_values[perm[j]] = 0;
        else if(p_count > 0 && n_count == 0)
            for(j=begin;j<end;j++)
                dec_values[perm[j]] = 1;
        else if(p_count == 0 && n_count > 0)
            for(j=begin;j<end;j++)
                dec_values[perm[j]] = -1;
        else
        {
            // Train on the fold complement with fixed weights and no
            // nested probability estimation.
            svm_parameter subparam = (svm_parameter)param.clone();
            subparam.probability=0;
            subparam.C=1.0;
            subparam.nr_weight=2;
            subparam.weight_label = new int[2];
            subparam.weight = new double[2];
            subparam.weight_label[0]=+1;
            subparam.weight_label[1]=-1;
            subparam.weight[0]=Cp;
            subparam.weight[1]=Cn;
            svm_model submodel = svm_train(subprob,subparam);
            for(j=begin;j<end;j++)
            {
                double[] dec_value=new double[1];
                svm_predict_values(submodel,prob.x[perm[j]],dec_value);
                dec_values[perm[j]]=dec_value[0];
                // ensure +1 -1 order; reason not using CV subroutine
                dec_values[perm[j]] *= submodel.label[0];
            }
        }
    }
    sigmoid_train(prob.l,dec_values,prob.y,probAB);
}
  1623. // Return parameter of a Laplace distribution
  1624. private static double svm_svr_probability(svm_problem prob, svm_parameter param)
  1625. {
  1626. int i;
  1627. int nr_fold = 5;
  1628. double[] ymv = new double[prob.l];
  1629. double mae = 0;
  1630. svm_parameter newparam = (svm_parameter)param.clone();
  1631. newparam.probability = 0;
  1632. svm_cross_validation(prob,newparam,nr_fold,ymv);
  1633. for(i=0;i<prob.l;i++)
  1634. {
  1635. ymv[i]=prob.y[i]-ymv[i];
  1636. mae += Math.abs(ymv[i]);
  1637. }
  1638. mae /= prob.l;
  1639. double std=Math.sqrt(2*mae*mae);
  1640. int count=0;
  1641. mae=0;
  1642. for(i=0;i<prob.l;i++)
  1643. if (Math.abs(ymv[i]) > 5*std)
  1644. count=count+1;
  1645. else
  1646. mae+=Math.abs(ymv[i]);
  1647. mae /= (prob.l-count);
  1648. svm.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+mae+"\n");
  1649. return mae;
  1650. }
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
//
// Groups training points by class: discovers the distinct labels (growing
// the label/count arrays as needed), then fills perm so that points of
// each class occupy a contiguous range [start[c], start[c]+count[c]).
private static void svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
{
    int l = prob.l;
    int max_nr_class = 16;
    int nr_class = 0;
    int[] label = new int[max_nr_class];
    int[] count = new int[max_nr_class];
    int[] data_label = new int[l];   // class index (not label value) per point
    int i;

    for(i=0;i<l;i++)
    {
        int this_label = (int)(prob.y[i]);
        int j;
        for(j=0;j<nr_class;j++)
        {
            if(this_label == label[j])
            {
                ++count[j];
                break;
            }
        }
        data_label[i] = j;
        if(j == nr_class)
        {
            // New label: grow the arrays if full, then register it.
            if(nr_class == max_nr_class)
            {
                max_nr_class *= 2;
                int[] new_data = new int[max_nr_class];
                System.arraycopy(label,0,new_data,0,label.length);
                label = new_data;

                new_data = new int[max_nr_class];
                System.arraycopy(count,0,new_data,0,count.length);
                count = new_data;
            }
            label[nr_class] = this_label;
            count[nr_class] = 1;
            ++nr_class;
        }
    }

    //
    // Labels are ordered by their first occurrence in the training set.
    // However, for two-class sets with -1/+1 labels and -1 appears first,
    // we swap labels to ensure that internally the binary SVM has positive data corresponding to the +1 instances.
    //
    // swap(...) is an m4 macro, not a Java method call.
    if (nr_class == 2 && label[0] == -1 && label[1] == +1)
    {
        swap(int,label[0],label[1]);
        swap(int,count[0],count[1]);
        for(i=0;i<l;i++)
        {
            if(data_label[i] == 0)
                data_label[i] = 1;
            else
                data_label[i] = 0;
        }
    }

    // Counting sort of the indices into perm, grouped by class.
    int[] start = new int[nr_class];
    start[0] = 0;
    for(i=1;i<nr_class;i++)
        start[i] = start[i-1]+count[i-1];
    for(i=0;i<l;i++)
    {
        perm[start[data_label[i]]] = i;
        ++start[data_label[i]];
    }
    // start[] was consumed as a cursor above; rebuild it.
    start[0] = 0;
    for(i=1;i<nr_class;i++)
        start[i] = start[i-1]+count[i-1];

    nr_class_ret[0] = nr_class;
    label_ret[0] = label;
    start_ret[0] = start;
    count_ret[0] = count;
}
//
// Interface functions
//
// Train a model. For one-class/regression a single decision function is
// trained; for classification, one binary classifier per class pair
// (one-vs-one) is trained and the support vectors are merged.
public static svm_model svm_train(svm_problem prob, svm_parameter param)
{
    svm_model model = new svm_model();
    model.param = param;

    if(param.svm_type == svm_parameter.ONE_CLASS ||
       param.svm_type == svm_parameter.EPSILON_SVR ||
       param.svm_type == svm_parameter.NU_SVR)
    {
        // regression or one-class-svm
        model.nr_class = 2;
        model.label = null;
        model.nSV = null;
        model.probA = null; model.probB = null;
        model.sv_coef = new double[1][];

        if(param.probability == 1 &&
           (param.svm_type == svm_parameter.EPSILON_SVR ||
            param.svm_type == svm_parameter.NU_SVR))
        {
            // Laplace noise parameter for SVR probability output.
            model.probA = new double[1];
            model.probA[0] = svm_svr_probability(prob,param);
        }

        decision_function f = svm_train_one(prob,param,0,0);
        model.rho = new double[1];
        model.rho[0] = f.rho;

        // Keep only points with nonzero coefficients as support vectors.
        int nSV = 0;
        int i;
        for(i=0;i<prob.l;i++)
            if(Math.abs(f.alpha[i]) > 0) ++nSV;
        model.l = nSV;
        model.SV = new svm_node[nSV][];
        model.sv_coef[0] = new double[nSV];
        model.sv_indices = new int[nSV];
        int j = 0;
        for(i=0;i<prob.l;i++)
            if(Math.abs(f.alpha[i]) > 0)
            {
                model.SV[j] = prob.x[i];
                model.sv_coef[0][j] = f.alpha[i];
                model.sv_indices[j] = i+1;   // 1-based indices into the training set
                ++j;
            }
    }
    else
    {
        // classification
        int l = prob.l;
        int[] tmp_nr_class = new int[1];
        int[][] tmp_label = new int[1][];
        int[][] tmp_start = new int[1][];
        int[][] tmp_count = new int[1][];
        int[] perm = new int[l];

        // group training data of the same class
        svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
        int nr_class = tmp_nr_class[0];
        int[] label = tmp_label[0];
        int[] start = tmp_start[0];
        int[] count = tmp_count[0];

        if(nr_class == 1)
            svm.info("WARNING: training data in only one class. See README for details.\n");

        // x[] holds the data permuted so each class is contiguous.
        svm_node[][] x = new svm_node[l][];
        int i;
        for(i=0;i<l;i++)
            x[i] = prob.x[perm[i]];

        // calculate weighted C
        double[] weighted_C = new double[nr_class];
        for(i=0;i<nr_class;i++)
            weighted_C[i] = param.C;
        for(i=0;i<param.nr_weight;i++)
        {
            int j;
            for(j=0;j<nr_class;j++)
                if(param.weight_label[i] == label[j])
                    break;
            if(j == nr_class)
                System.err.print("WARNING: class label "+param.weight_label[i]+" specified in weight is not found\n");
            else
                weighted_C[j] *= param.weight[i];
        }

        // train k*(k-1)/2 models

        // nonzero[i]: point i is a support vector in at least one model.
        boolean[] nonzero = new boolean[l];
        for(i=0;i<l;i++)
            nonzero[i] = false;
        decision_function[] f = new decision_function[nr_class*(nr_class-1)/2];

        double[] probA=null,probB=null;
        if (param.probability == 1)
        {
            probA=new double[nr_class*(nr_class-1)/2];
            probB=new double[nr_class*(nr_class-1)/2];
        }

        // One binary problem per unordered class pair (i,j), i < j.
        int p = 0;
        for(i=0;i<nr_class;i++)
            for(int j=i+1;j<nr_class;j++)
            {
                svm_problem sub_prob = new svm_problem();
                int si = start[i], sj = start[j];
                int ci = count[i], cj = count[j];
                sub_prob.l = ci+cj;
                sub_prob.x = new svm_node[sub_prob.l][];
                sub_prob.y = new double[sub_prob.l];
                int k;
                for(k=0;k<ci;k++)
                {
                    sub_prob.x[k] = x[si+k];
                    sub_prob.y[k] = +1;
                }
                for(k=0;k<cj;k++)
                {
                    sub_prob.x[ci+k] = x[sj+k];
                    sub_prob.y[ci+k] = -1;
                }

                if(param.probability == 1)
                {
                    double[] probAB=new double[2];
                    svm_binary_svc_probability(sub_prob,param,weighted_C[i],weighted_C[j],probAB);
                    probA[p]=probAB[0];
                    probB[p]=probAB[1];
                }

                f[p] = svm_train_one(sub_prob,param,weighted_C[i],weighted_C[j]);
                for(k=0;k<ci;k++)
                    if(!nonzero[si+k] && Math.abs(f[p].alpha[k]) > 0)
                        nonzero[si+k] = true;
                for(k=0;k<cj;k++)
                    if(!nonzero[sj+k] && Math.abs(f[p].alpha[ci+k]) > 0)
                        nonzero[sj+k] = true;
                ++p;
            }

        // build output

        model.nr_class = nr_class;

        model.label = new int[nr_class];
        for(i=0;i<nr_class;i++)
            model.label[i] = label[i];

        model.rho = new double[nr_class*(nr_class-1)/2];
        for(i=0;i<nr_class*(nr_class-1)/2;i++)
            model.rho[i] = f[i].rho;

        if(param.probability == 1)
        {
            model.probA = new double[nr_class*(nr_class-1)/2];
            model.probB = new double[nr_class*(nr_class-1)/2];
            for(i=0;i<nr_class*(nr_class-1)/2;i++)
            {
                model.probA[i] = probA[i];
                model.probB[i] = probB[i];
            }
        }
        else
        {
            model.probA=null;
            model.probB=null;
        }

        // Count support vectors per class and in total.
        int nnz = 0;
        int[] nz_count = new int[nr_class];
        model.nSV = new int[nr_class];
        for(i=0;i<nr_class;i++)
        {
            int nSV = 0;
            for(int j=0;j<count[i];j++)
                if(nonzero[start[i]+j])
                {
                    ++nSV;
                    ++nnz;
                }
            model.nSV[i] = nSV;
            nz_count[i] = nSV;
        }

        svm.info("Total nSV = "+nnz+"\n");

        model.l = nnz;
        model.SV = new svm_node[nnz][];
        model.sv_indices = new int[nnz];
        p = 0;
        for(i=0;i<l;i++)
            if(nonzero[i])
            {
                model.SV[p] = x[i];
                model.sv_indices[p++] = perm[i] + 1;
            }

        // nz_start[c]: offset of class c's SVs within the merged SV list.
        int[] nz_start = new int[nr_class];
        nz_start[0] = 0;
        for(i=1;i<nr_class;i++)
            nz_start[i] = nz_start[i-1]+nz_count[i-1];

        model.sv_coef = new double[nr_class-1][];
        for(i=0;i<nr_class-1;i++)
            model.sv_coef[i] = new double[nnz];

        p = 0;
        for(i=0;i<nr_class;i++)
            for(int j=i+1;j<nr_class;j++)
            {
                // classifier (i,j): coefficients with
                // i are in sv_coef[j-1][nz_start[i]...],
                // j are in sv_coef[i][nz_start[j]...]

                int si = start[i];
                int sj = start[j];
                int ci = count[i];
                int cj = count[j];

                int q = nz_start[i];
                int k;
                for(k=0;k<ci;k++)
                    if(nonzero[si+k])
                        model.sv_coef[j-1][q++] = f[p].alpha[k];
                q = nz_start[j];
                for(k=0;k<cj;k++)
                    if(nonzero[sj+k])
                        model.sv_coef[i][q++] = f[p].alpha[ci+k];
                ++p;
            }
    }
    return model;
}
/*
 * Stratified cross validation.
 * Splits prob into nr_fold folds, trains on the other nr_fold-1 folds and
 * predicts each held-out fold, storing every prediction in target[] at the
 * instance's original position. For C_SVC/NU_SVC the folds are stratified so
 * each fold keeps roughly the class proportions of prob.
 */
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
{
	int i;
	int[] fold_start = new int[nr_fold+1];
	int l = prob.l;
	int[] perm = new int[l];
	// stratified cv may not give leave-one-out rate
	// Each class to l folds -> some folds may have zero elements
	if((param.svm_type == svm_parameter.C_SVC ||
	    param.svm_type == svm_parameter.NU_SVC) && nr_fold < l)
	{
		// out-parameters of svm_group_classes, passed as length-1 arrays
		int[] tmp_nr_class = new int[1];
		int[][] tmp_label = new int[1][];
		int[][] tmp_start = new int[1][];
		int[][] tmp_count = new int[1][];
		// group instances by class; perm maps grouped order to original indices
		svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
		int nr_class = tmp_nr_class[0];
		int[] start = tmp_start[0];
		int[] count = tmp_count[0];
		// random shuffle and then data grouped by fold using the array perm
		int[] fold_count = new int[nr_fold];
		int c;
		int[] index = new int[l];
		for(i=0;i<l;i++)
			index[i]=perm[i];
		// Fisher-Yates shuffle within each class block
		for (c=0; c<nr_class; c++)
			for(i=0;i<count[c];i++)
			{
				int j = i+rand.nextInt(count[c]-i);
				swap(int,index[start[c]+j],index[start[c]+i]);
			}
		// fold i gets its proportional share of every class
		for(i=0;i<nr_fold;i++)
		{
			fold_count[i] = 0;
			for (c=0; c<nr_class;c++)
				fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
		}
		fold_start[0]=0;
		for (i=1;i<=nr_fold;i++)
			fold_start[i] = fold_start[i-1]+fold_count[i-1];
		// scatter each class slice into its fold; fold_start[] is advanced
		// while filling and rebuilt just below
		for (c=0; c<nr_class;c++)
			for(i=0;i<nr_fold;i++)
			{
				int begin = start[c]+i*count[c]/nr_fold;
				int end = start[c]+(i+1)*count[c]/nr_fold;
				for(int j=begin;j<end;j++)
				{
					perm[fold_start[i]] = index[j];
					fold_start[i]++;
				}
			}
		fold_start[0]=0;
		for (i=1;i<=nr_fold;i++)
			fold_start[i] = fold_start[i-1]+fold_count[i-1];
	}
	else
	{
		// regression / one-class (or nr_fold >= l): plain random permutation
		for(i=0;i<l;i++) perm[i]=i;
		for(i=0;i<l;i++)
		{
			int j = i+rand.nextInt(l-i);
			swap(int,perm[i],perm[j]);
		}
		for(i=0;i<=nr_fold;i++)
			fold_start[i]=i*l/nr_fold;
	}
	// train on everything outside fold i, then predict fold i
	for(i=0;i<nr_fold;i++)
	{
		int begin = fold_start[i];
		int end = fold_start[i+1];
		int j,k;
		svm_problem subprob = new svm_problem();
		subprob.l = l-(end-begin);
		subprob.x = new svm_node[subprob.l][];
		subprob.y = new double[subprob.l];
		k=0;
		for(j=0;j<begin;j++)
		{
			subprob.x[k] = prob.x[perm[j]];
			subprob.y[k] = prob.y[perm[j]];
			++k;
		}
		for(j=end;j<l;j++)
		{
			subprob.x[k] = prob.x[perm[j]];
			subprob.y[k] = prob.y[perm[j]];
			++k;
		}
		svm_model submodel = svm_train(subprob,param);
		if(param.probability==1 &&
		   (param.svm_type == svm_parameter.C_SVC ||
		    param.svm_type == svm_parameter.NU_SVC))
		{
			// probability output requested: target gets the most probable label
			double[] prob_estimates= new double[svm_get_nr_class(submodel)];
			for(j=begin;j<end;j++)
				target[perm[j]] = svm_predict_probability(submodel,prob.x[perm[j]],prob_estimates);
		}
		else
			for(j=begin;j<end;j++)
				target[perm[j]] = svm_predict(submodel,prob.x[perm[j]]);
	}
}
  2039. public static int svm_get_svm_type(svm_model model)
  2040. {
  2041. return model.param.svm_type;
  2042. }
  2043. public static int svm_get_nr_class(svm_model model)
  2044. {
  2045. return model.nr_class;
  2046. }
  2047. public static void svm_get_labels(svm_model model, int[] label)
  2048. {
  2049. if (model.label != null)
  2050. for(int i=0;i<model.nr_class;i++)
  2051. label[i] = model.label[i];
  2052. }
  2053. public static void svm_get_sv_indices(svm_model model, int[] indices)
  2054. {
  2055. if (model.sv_indices != null)
  2056. for(int i=0;i<model.l;i++)
  2057. indices[i] = model.sv_indices[i];
  2058. }
  2059. public static int svm_get_nr_sv(svm_model model)
  2060. {
  2061. return model.l;
  2062. }
  2063. public static double svm_get_svr_probability(svm_model model)
  2064. {
  2065. if ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
  2066. model.probA!=null)
  2067. return model.probA[0];
  2068. else
  2069. {
  2070. System.err.print("Model doesn't contain information for SVR probability inference\n");
  2071. return 0;
  2072. }
  2073. }
/*
 * Computes the decision value(s) of x against model.
 * For ONE_CLASS / EPSILON_SVR / NU_SVR, dec_values[0] receives the single
 * decision value; the return is +1/-1 for one-class and the raw value for
 * regression. Otherwise dec_values receives the nr_class*(nr_class-1)/2
 * pairwise (one-against-one) decision values and the label winning the most
 * pairwise votes is returned.
 */
public static double svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
{
	int i;
	if(model.param.svm_type == svm_parameter.ONE_CLASS ||
	   model.param.svm_type == svm_parameter.EPSILON_SVR ||
	   model.param.svm_type == svm_parameter.NU_SVR)
	{
		// single decision function: sum_i coef_i * K(x, SV_i) - rho
		double[] sv_coef = model.sv_coef[0];
		double sum = 0;
		for(i=0;i<model.l;i++)
			sum += sv_coef[i] * Kernel.k_function(x,model.SV[i],model.param);
		sum -= model.rho[0];
		dec_values[0] = sum;
		if(model.param.svm_type == svm_parameter.ONE_CLASS)
			return (sum>0)?1:-1;
		else
			return sum;
	}
	else
	{
		// multi-class: one binary classifier per class pair
		int nr_class = model.nr_class;
		int l = model.l;
		// evaluate the kernel against every support vector once, up front
		double[] kvalue = new double[l];
		for(i=0;i<l;i++)
			kvalue[i] = Kernel.k_function(x,model.SV[i],model.param);
		// start[i]: index of the first support vector belonging to class i
		int[] start = new int[nr_class];
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+model.nSV[i-1];
		int[] vote = new int[nr_class];
		for(i=0;i<nr_class;i++)
			vote[i] = 0;
		int p=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				// decision value of the (i,j) classifier
				double sum = 0;
				int si = start[i];
				int sj = start[j];
				int ci = model.nSV[i];
				int cj = model.nSV[j];
				int k;
				// coefficients for class-i SVs live in sv_coef[j-1],
				// those for class-j SVs in sv_coef[i]
				double[] coef1 = model.sv_coef[j-1];
				double[] coef2 = model.sv_coef[i];
				for(k=0;k<ci;k++)
					sum += coef1[si+k] * kvalue[si+k];
				for(k=0;k<cj;k++)
					sum += coef2[sj+k] * kvalue[sj+k];
				sum -= model.rho[p];
				dec_values[p] = sum;
				// positive value votes for class i, otherwise class j
				if(dec_values[p] > 0)
					++vote[i];
				else
					++vote[j];
				p++;
			}
		// ties are resolved in favour of the smaller class index
		int vote_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(vote[i] > vote[vote_max_idx])
				vote_max_idx = i;
		return model.label[vote_max_idx];
	}
}
  2137. public static double svm_predict(svm_model model, svm_node[] x)
  2138. {
  2139. int nr_class = model.nr_class;
  2140. double[] dec_values;
  2141. if(model.param.svm_type == svm_parameter.ONE_CLASS ||
  2142. model.param.svm_type == svm_parameter.EPSILON_SVR ||
  2143. model.param.svm_type == svm_parameter.NU_SVR)
  2144. dec_values = new double[1];
  2145. else
  2146. dec_values = new double[nr_class*(nr_class-1)/2];
  2147. double pred_result = svm_predict_values(model, x, dec_values);
  2148. return pred_result;
  2149. }
/*
 * Predicts the label of x with per-class probability estimates.
 * Requires a C_SVC/NU_SVC model trained with probability information
 * (probA/probB): prob_estimates[i] receives the probability of
 * model.label[i] and the most probable label is returned. Without that
 * information this falls back to plain svm_predict.
 */
public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
{
	if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
	    model.probA!=null && model.probB!=null)
	{
		int i;
		int nr_class = model.nr_class;
		double[] dec_values = new double[nr_class*(nr_class-1)/2];
		svm_predict_values(model, x, dec_values);
		// clamp pairwise probabilities away from exactly 0/1 for numerical safety
		double min_prob=1e-7;
		double[][] pairwise_prob=new double[nr_class][nr_class];
		int k=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				// sigmoid of the decision value using the trained (probA,probB) pair
				pairwise_prob[i][j]=Math.min(Math.max(sigmoid_predict(dec_values[k],model.probA[k],model.probB[k]),min_prob),1-min_prob);
				pairwise_prob[j][i]=1-pairwise_prob[i][j];
				k++;
			}
		if (nr_class == 2)
		{
			// two classes: the single pairwise probability is the answer
			prob_estimates[0] = pairwise_prob[0][1];
			prob_estimates[1] = pairwise_prob[1][0];
		}
		else
			// couple the pairwise estimates into per-class probabilities
			multiclass_probability(nr_class,pairwise_prob,prob_estimates);
		// ties are resolved in favour of the smaller class index
		int prob_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(prob_estimates[i] > prob_estimates[prob_max_idx])
				prob_max_idx = i;
		return model.label[prob_max_idx];
	}
	else
		return svm_predict(model, x);
}
// Textual names written to / read from model files, indexed by
// param.svm_type (see svm_save_model / read_model_header).
static final String svm_type_table[] =
{
	"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",
};
// Same, indexed by param.kernel_type.
static final String kernel_type_table[]=
{
	"linear","polynomial","rbf","sigmoid","precomputed"
};
  2193. public static void svm_save_model(String model_file_name, svm_model model) throws IOException
  2194. {
  2195. DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(model_file_name)));
  2196. svm_parameter param = model.param;
  2197. fp.writeBytes("svm_type "+svm_type_table[param.svm_type]+"\n");
  2198. fp.writeBytes("kernel_type "+kernel_type_table[param.kernel_type]+"\n");
  2199. if(param.kernel_type == svm_parameter.POLY)
  2200. fp.writeBytes("degree "+param.degree+"\n");
  2201. if(param.kernel_type == svm_parameter.POLY ||
  2202. param.kernel_type == svm_parameter.RBF ||
  2203. param.kernel_type == svm_parameter.SIGMOID)
  2204. fp.writeBytes("gamma "+param.gamma+"\n");
  2205. if(param.kernel_type == svm_parameter.POLY ||
  2206. param.kernel_type == svm_parameter.SIGMOID)
  2207. fp.writeBytes("coef0 "+param.coef0+"\n");
  2208. int nr_class = model.nr_class;
  2209. int l = model.l;
  2210. fp.writeBytes("nr_class "+nr_class+"\n");
  2211. fp.writeBytes("total_sv "+l+"\n");
  2212. {
  2213. fp.writeBytes("rho");
  2214. for(int i=0;i<nr_class*(nr_class-1)/2;i++)
  2215. fp.writeBytes(" "+model.rho[i]);
  2216. fp.writeBytes("\n");
  2217. }
  2218. if(model.label != null)
  2219. {
  2220. fp.writeBytes("label");
  2221. for(int i=0;i<nr_class;i++)
  2222. fp.writeBytes(" "+model.label[i]);
  2223. fp.writeBytes("\n");
  2224. }
  2225. if(model.probA != null) // regression has probA only
  2226. {
  2227. fp.writeBytes("probA");
  2228. for(int i=0;i<nr_class*(nr_class-1)/2;i++)
  2229. fp.writeBytes(" "+model.probA[i]);
  2230. fp.writeBytes("\n");
  2231. }
  2232. if(model.probB != null)
  2233. {
  2234. fp.writeBytes("probB");
  2235. for(int i=0;i<nr_class*(nr_class-1)/2;i++)
  2236. fp.writeBytes(" "+model.probB[i]);
  2237. fp.writeBytes("\n");
  2238. }
  2239. if(model.nSV != null)
  2240. {
  2241. fp.writeBytes("nr_sv");
  2242. for(int i=0;i<nr_class;i++)
  2243. fp.writeBytes(" "+model.nSV[i]);
  2244. fp.writeBytes("\n");
  2245. }
  2246. fp.writeBytes("SV\n");
  2247. double[][] sv_coef = model.sv_coef;
  2248. svm_node[][] SV = model.SV;
  2249. for(int i=0;i<l;i++)
  2250. {
  2251. for(int j=0;j<nr_class-1;j++)
  2252. fp.writeBytes(sv_coef[j][i]+" ");
  2253. svm_node[] p = SV[i];
  2254. if(param.kernel_type == svm_parameter.PRECOMPUTED)
  2255. fp.writeBytes("0:"+(int)(p[0].value));
  2256. else
  2257. for(int j=0;j<p.length;j++)
  2258. fp.writeBytes(p[j].index+":"+p[j].value+" ");
  2259. fp.writeBytes("\n");
  2260. }
  2261. fp.close();
  2262. }
  2263. private static double atof(String s)
  2264. {
  2265. return Double.valueOf(s).doubleValue();
  2266. }
  2267. private static int atoi(String s)
  2268. {
  2269. return Integer.parseInt(s);
  2270. }
/*
 * Parses the header of a libsvm model file from fp into model, consuming
 * lines up to and including the "SV" marker. Returns false on an unknown
 * keyword, unknown svm/kernel type, or any read/parse error (model may then
 * be partially filled). Training-only parameter arrays are set to null.
 */
private static boolean read_model_header(BufferedReader fp, svm_model model)
{
	svm_parameter param = new svm_parameter();
	model.param = param;
	// parameters for training only won't be assigned, but arrays are assigned as NULL for safety
	param.nr_weight = 0;
	param.weight_label = null;
	param.weight = null;
	try
	{
		while(true)
		{
			// each header line is "keyword argument(s)"
			String cmd = fp.readLine();
			String arg = cmd.substring(cmd.indexOf(' ')+1);
			if(cmd.startsWith("svm_type"))
			{
				// map the textual type back to its svm_parameter constant
				int i;
				for(i=0;i<svm_type_table.length;i++)
				{
					if(arg.indexOf(svm_type_table[i])!=-1)
					{
						param.svm_type=i;
						break;
					}
				}
				if(i == svm_type_table.length)
				{
					System.err.print("unknown svm type.\n");
					return false;
				}
			}
			else if(cmd.startsWith("kernel_type"))
			{
				// map the textual kernel name back to its constant
				int i;
				for(i=0;i<kernel_type_table.length;i++)
				{
					if(arg.indexOf(kernel_type_table[i])!=-1)
					{
						param.kernel_type=i;
						break;
					}
				}
				if(i == kernel_type_table.length)
				{
					System.err.print("unknown kernel function.\n");
					return false;
				}
			}
			else if(cmd.startsWith("degree"))
				param.degree = atoi(arg);
			else if(cmd.startsWith("gamma"))
				param.gamma = atof(arg);
			else if(cmd.startsWith("coef0"))
				param.coef0 = atof(arg);
			else if(cmd.startsWith("nr_class"))
				model.nr_class = atoi(arg);
			else if(cmd.startsWith("total_sv"))
				model.l = atoi(arg);
			else if(cmd.startsWith("rho"))
			{
				// one rho per class pair; assumes nr_class was read earlier
				int n = model.nr_class * (model.nr_class-1)/2;
				model.rho = new double[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.rho[i] = atof(st.nextToken());
			}
			else if(cmd.startsWith("label"))
			{
				int n = model.nr_class;
				model.label = new int[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.label[i] = atoi(st.nextToken());
			}
			else if(cmd.startsWith("probA"))
			{
				int n = model.nr_class*(model.nr_class-1)/2;
				model.probA = new double[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.probA[i] = atof(st.nextToken());
			}
			else if(cmd.startsWith("probB"))
			{
				int n = model.nr_class*(model.nr_class-1)/2;
				model.probB = new double[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.probB[i] = atof(st.nextToken());
			}
			// NOTE: "nr_sv" must be tested before the bare "SV" marker below
			// would ever see it; startsWith("SV") does not match "nr_sv".
			else if(cmd.startsWith("nr_sv"))
			{
				int n = model.nr_class;
				model.nSV = new int[n];
				StringTokenizer st = new StringTokenizer(arg);
				for(int i=0;i<n;i++)
					model.nSV[i] = atoi(st.nextToken());
			}
			else if(cmd.startsWith("SV"))
			{
				// header finished; support vectors follow
				break;
			}
			else
			{
				System.err.print("unknown text in model file: ["+cmd+"]\n");
				return false;
			}
		}
	}
	catch(Exception e)
	{
		// any parse/IO problem (including EOF: readLine() returning null
		// makes the substring call throw) is reported as failure
		return false;
	}
	return true;
}
  2386. public static svm_model svm_load_model(String model_file_name) throws IOException
  2387. {
  2388. return svm_load_model(new BufferedReader(new FileReader(model_file_name)));
  2389. }
  2390. public static svm_model svm_load_model(BufferedReader fp) throws IOException
  2391. {
  2392. // read parameters
  2393. svm_model model = new svm_model();
  2394. model.rho = null;
  2395. model.probA = null;
  2396. model.probB = null;
  2397. model.label = null;
  2398. model.nSV = null;
  2399. if (read_model_header(fp, model) == false)
  2400. {
  2401. System.err.print("ERROR: failed to read model\n");
  2402. return null;
  2403. }
  2404. // read sv_coef and SV
  2405. int m = model.nr_class - 1;
  2406. int l = model.l;
  2407. model.sv_coef = new double[m][l];
  2408. model.SV = new svm_node[l][];
  2409. for(int i=0;i<l;i++)
  2410. {
  2411. String line = fp.readLine();
  2412. StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
  2413. for(int k=0;k<m;k++)
  2414. model.sv_coef[k][i] = atof(st.nextToken());
  2415. int n = st.countTokens()/2;
  2416. model.SV[i] = new svm_node[n];
  2417. for(int j=0;j<n;j++)
  2418. {
  2419. model.SV[i][j] = new svm_node();
  2420. model.SV[i][j].index = atoi(st.nextToken());
  2421. model.SV[i][j].value = atof(st.nextToken());
  2422. }
  2423. }
  2424. fp.close();
  2425. return model;
  2426. }
/*
 * Validates param against prob before training.
 * Returns null when the parameters are usable, otherwise an English
 * message describing the first problem found. For NU_SVC it additionally
 * verifies that nu is feasible given the per-class instance counts.
 */
public static String svm_check_parameter(svm_problem prob, svm_parameter param)
{
	// svm_type
	int svm_type = param.svm_type;
	if(svm_type != svm_parameter.C_SVC &&
	   svm_type != svm_parameter.NU_SVC &&
	   svm_type != svm_parameter.ONE_CLASS &&
	   svm_type != svm_parameter.EPSILON_SVR &&
	   svm_type != svm_parameter.NU_SVR)
		return "unknown svm type";
	// kernel_type, degree
	int kernel_type = param.kernel_type;
	if(kernel_type != svm_parameter.LINEAR &&
	   kernel_type != svm_parameter.POLY &&
	   kernel_type != svm_parameter.RBF &&
	   kernel_type != svm_parameter.SIGMOID &&
	   kernel_type != svm_parameter.PRECOMPUTED)
		return "unknown kernel type";
	if(param.gamma < 0)
		return "gamma < 0";
	if(param.degree < 0)
		return "degree of polynomial kernel < 0";
	// cache_size,eps,C,nu,p,shrinking
	if(param.cache_size <= 0)
		return "cache_size <= 0";
	if(param.eps <= 0)
		return "eps <= 0";
	// C only matters for the C-parameterized formulations
	if(svm_type == svm_parameter.C_SVC ||
	   svm_type == svm_parameter.EPSILON_SVR ||
	   svm_type == svm_parameter.NU_SVR)
		if(param.C <= 0)
			return "C <= 0";
	// nu only matters for the nu-parameterized formulations
	if(svm_type == svm_parameter.NU_SVC ||
	   svm_type == svm_parameter.ONE_CLASS ||
	   svm_type == svm_parameter.NU_SVR)
		if(param.nu <= 0 || param.nu > 1)
			return "nu <= 0 or nu > 1";
	if(svm_type == svm_parameter.EPSILON_SVR)
		if(param.p < 0)
			return "p < 0";
	if(param.shrinking != 0 &&
	   param.shrinking != 1)
		return "shrinking != 0 and shrinking != 1";
	if(param.probability != 0 &&
	   param.probability != 1)
		return "probability != 0 and probability != 1";
	if(param.probability == 1 &&
	   svm_type == svm_parameter.ONE_CLASS)
		return "one-class SVM probability output not supported yet";
	// check whether nu-svc is feasible
	if(svm_type == svm_parameter.NU_SVC)
	{
		// count the instances of each class, growing the arrays on demand
		int l = prob.l;
		int max_nr_class = 16;
		int nr_class = 0;
		int[] label = new int[max_nr_class];
		int[] count = new int[max_nr_class];
		int i;
		for(i=0;i<l;i++)
		{
			int this_label = (int)prob.y[i];
			int j;
			for(j=0;j<nr_class;j++)
				if(this_label == label[j])
				{
					++count[j];
					break;
				}
			if(j == nr_class)
			{
				// new label: double capacity when full, then append
				if(nr_class == max_nr_class)
				{
					max_nr_class *= 2;
					int[] new_data = new int[max_nr_class];
					System.arraycopy(label,0,new_data,0,label.length);
					label = new_data;
					new_data = new int[max_nr_class];
					System.arraycopy(count,0,new_data,0,count.length);
					count = new_data;
				}
				label[nr_class] = this_label;
				count[nr_class] = 1;
				++nr_class;
			}
		}
		// nu*(n1+n2)/2 must not exceed min(n1,n2) for any class pair
		for(i=0;i<nr_class;i++)
		{
			int n1 = count[i];
			for(int j=i+1;j<nr_class;j++)
			{
				int n2 = count[j];
				if(param.nu*(n1+n2)/2 > Math.min(n1,n2))
					return "specified nu is infeasible";
			}
		}
	}
	return null;
}
  2525. public static int svm_check_probability_model(svm_model model)
  2526. {
  2527. if (((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
  2528. model.probA!=null && model.probB!=null) ||
  2529. ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
  2530. model.probA!=null))
  2531. return 1;
  2532. else
  2533. return 0;
  2534. }
  2535. public static void svm_set_print_string_function(svm_print_interface print_func)
  2536. {
  2537. if (print_func == null)
  2538. svm_print_string = svm_print_stdout;
  2539. else
  2540. svm_print_string = print_func;
  2541. }
  2542. }

A Python package for graph kernels, graph edit distances and the graph pre-image problem.