#
source:
orange/source/orange/libsvm_interface.cpp
@
11650:ed138a0a0d5c

Revision 11650:ed138a0a0d5c, 26.0 KB, checked in by Ales Erjavec <ales.erjavec@…>, 8 months ago.

[8978] | 1 | /* |

2 | This file is part of Orange. | |

3 | ||

4 | Copyright 1996-2011 Faculty of Computer and Information Science, University of Ljubljana | |

5 | Contact: janez.demsar@fri.uni-lj.si | |

6 | ||

7 | Orange is free software: you can redistribute it and/or modify | |

8 | it under the terms of the GNU General Public License as published by | |

9 | the Free Software Foundation, either version 3 of the License, or | |

10 | (at your option) any later version. | |

11 | ||

12 | Orange is distributed in the hope that it will be useful, | |

13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |

14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |

15 | GNU General Public License for more details. | |

16 | ||

17 | You should have received a copy of the GNU General Public License | |

18 | along with Orange. If not, see <http://www.gnu.org/licenses/>. | |

19 | */ | |

20 | ||

21 | #include <iostream> | |

22 | #include <sstream> | |

23 | ||

[11650] | 24 | #include "libsvm_interface.hpp" |

25 | #include "symmatrix.hpp" | |

26 | ||

27 | #include <algorithm> | |

28 | #include <cmath> | |

29 | ||

[8978] | 30 | |

31 | // Defined in svm.cpp. If new svm or kernel types are added this should be updated. | |

32 | ||

33 | static const char *svm_type_table[] = | |

34 | { | |

35 | "c_svc","nu_svc","one_class","epsilon_svr","nu_svr",NULL | |

36 | }; | |

37 | ||

38 | static const char *kernel_type_table[]= | |

39 | { | |

40 | "linear","polynomial","rbf","sigmoid","precomputed",NULL | |

41 | }; | |

42 | ||

43 | #define Malloc(type,n) (type *)malloc((n)*sizeof(type)) | |

44 | ||

45 | /* | |

46 | * Save load functions for use with orange pickling. | |

47 | * They are a copy of the standard libSVM save-load functions | |

48 | * except that they read/write from/to std::iostream objects. | |

49 | */ | |

50 | ||

/*
 * Serialize a trained svm_model to 'stream' in the standard libSVM text
 * model format. Mirrors libSVM's svm_save_model, but writes to a
 * std::ostream (used for orange pickling).
 * Returns 0 on success, 1 if the stream entered a fail state.
 */
int svm_save_model_alt(std::ostream& stream, const svm_model *model){
	const svm_parameter& param = model->param;
	// 17 significant digits are enough to round-trip IEEE doubles exactly.
	stream.precision(17);

	stream << "svm_type " << svm_type_table[param.svm_type] << endl;
	stream << "kernel_type " << kernel_type_table[param.kernel_type] << endl;

	// Kernel hyper-parameters are only written for kernels that use them.
	if(param.kernel_type == POLY)
		stream << "degree " << param.degree << endl;

	if(param.kernel_type == POLY || param.kernel_type == RBF || param.kernel_type == SIGMOID)
		stream << "gamma " << param.gamma << endl;

	if(param.kernel_type == POLY || param.kernel_type == SIGMOID)
		stream << "coef0 " << param.coef0 << endl;

	int nr_class = model->nr_class;
	int l = model->l;
	stream << "nr_class " << nr_class << endl;
	stream << "total_sv " << l << endl;
	{
		// One rho per binary (one-vs-one) sub-classifier.
		stream << "rho";
		for(int i=0;i<nr_class*(nr_class-1)/2;i++)
			stream << " " << model->rho[i];
		stream << endl;
	}

	if(model->label)
	{
		stream << "label";
		for(int i=0;i<nr_class;i++)
			stream << " " << model->label[i];
		stream << endl;
	}

	if(model->probA) // regression has probA only
	{
		stream << "probA";
		for(int i=0;i<nr_class*(nr_class-1)/2;i++)
			stream << " " << model->probA[i];
		stream << endl;
	}
	if(model->probB)
	{
		stream << "probB";
		for(int i=0;i<nr_class*(nr_class-1)/2;i++)
			stream << " " << model->probB[i];
		stream << endl;
	}

	if(model->nSV)
	{
		stream << "nr_sv";
		for(int i=0;i<nr_class;i++)
			stream << " " << model->nSV[i];
		stream << endl;
	}

	stream << "SV" << endl;
	const double * const *sv_coef = model->sv_coef;
	const svm_node * const *SV = model->SV;

	// One line per support vector: nr_class-1 coefficients, then the
	// sparse "index:value" pairs.
	for(int i=0;i<l;i++)
	{
		for(int j=0;j<nr_class-1;j++)
			stream << sv_coef[j][i] << " ";

		const svm_node *p = SV[i];

		if(param.kernel_type == PRECOMPUTED)
			// Precomputed kernel: node 0's value is the training-instance index.
			stream << "0:" << (int)(p->value) << " ";
		else
			while(p->index != -1)
			{
				stream << (int)(p->index) << ":" << p->value << " ";
				p++;
			}
		stream << endl;
	}

	if (!stream.fail())
		return 0;
	else
		return 1;
}

136 | ||

137 | int svm_save_model_alt(std::string& buffer, const svm_model *model){ | |

138 | std::ostringstream strstream; | |

139 | int ret = svm_save_model_alt(strstream, model); | |

140 | buffer = strstream.rdbuf()->str(); | |

141 | return ret; | |

142 | } | |

143 | ||

144 | ||

145 | svm_model *svm_load_model_alt(std::istream& stream) | |

146 | { | |

147 | svm_model *model = Malloc(svm_model,1); | |

148 | svm_parameter& param = model->param; | |

149 | model->rho = NULL; | |

150 | model->probA = NULL; | |

151 | model->probB = NULL; | |

152 | model->label = NULL; | |

153 | model->nSV = NULL; | |

154 | ||

155 | char cmd[81]; | |

156 | stream.width(80); | |

157 | while (stream.good()) | |

158 | { | |

159 | stream >> cmd; | |

160 | ||

161 | if(strcmp(cmd, "svm_type") == 0) | |

162 | { | |

163 | stream >> cmd; | |

164 | int i; | |

165 | for(i=0; svm_type_table[i]; i++) | |

166 | { | |

167 | if(strcmp(cmd, svm_type_table[i]) == 0) | |

168 | { | |

169 | param.svm_type=i; | |

170 | break; | |

171 | } | |

172 | } | |

173 | if(svm_type_table[i] == NULL) | |

174 | { | |

175 | fprintf(stderr, "unknown svm type.\n"); | |

176 | free(model->rho); | |

177 | free(model->label); | |

178 | free(model->nSV); | |

179 | free(model); | |

180 | return NULL; | |

181 | } | |

182 | } | |

183 | else if(strcmp(cmd, "kernel_type") == 0) | |

184 | { | |

185 | stream >> cmd; | |

186 | int i; | |

187 | for(i=0;kernel_type_table[i];i++) | |

188 | { | |

189 | if(strcmp(kernel_type_table[i], cmd)==0) | |

190 | { | |

191 | param.kernel_type=i; | |

192 | break; | |

193 | } | |

194 | } | |

195 | if(kernel_type_table[i] == NULL) | |

196 | { | |

197 | fprintf(stderr,"unknown kernel function.\n"); | |

198 | free(model->rho); | |

199 | free(model->label); | |

200 | free(model->nSV); | |

201 | free(model); | |

202 | return NULL; | |

203 | } | |

204 | } | |

205 | else if(strcmp(cmd,"degree")==0) | |

206 | stream >> param.degree; | |

207 | else if(strcmp(cmd,"gamma")==0) | |

208 | stream >> param.gamma; | |

209 | else if(strcmp(cmd,"coef0")==0) | |

210 | stream >> param.coef0; | |

211 | else if(strcmp(cmd,"nr_class")==0) | |

212 | stream >> model->nr_class; | |

213 | else if(strcmp(cmd,"total_sv")==0) | |

214 | stream >> model->l; | |

215 | else if(strcmp(cmd,"rho")==0) | |

216 | { | |

217 | int n = model->nr_class * (model->nr_class-1)/2; | |

218 | model->rho = Malloc(double,n); | |

[9389] | 219 | string rho_str; |

220 | for(int i=0;i<n;i++){ | |

221 | // Read the number into a string and then use strtod | |

222 | // for proper handling of NaN's | |

223 | stream >> rho_str; | |

224 | model->rho[i] = strtod(rho_str.c_str(), NULL); | |

225 | } | |

[8978] | 226 | } |

227 | else if(strcmp(cmd,"label")==0) | |

228 | { | |

229 | int n = model->nr_class; | |

230 | model->label = Malloc(int,n); | |

231 | for(int i=0;i<n;i++) | |

232 | stream >> model->label[i]; | |

233 | } | |

234 | else if(strcmp(cmd,"probA")==0) | |

235 | { | |

236 | int n = model->nr_class * (model->nr_class-1)/2; | |

237 | model->probA = Malloc(double,n); | |

238 | for(int i=0;i<n;i++) | |

239 | stream >> model->probA[i]; | |

240 | } | |

241 | else if(strcmp(cmd,"probB")==0) | |

242 | { | |

243 | int n = model->nr_class * (model->nr_class-1)/2; | |

244 | model->probB = Malloc(double,n); | |

245 | for(int i=0;i<n;i++) | |

246 | stream >> model->probB[i]; | |

247 | } | |

248 | else if(strcmp(cmd,"nr_sv")==0) | |

249 | { | |

250 | int n = model->nr_class; | |

251 | model->nSV = Malloc(int,n); | |

252 | for(int i=0;i<n;i++) | |

253 | stream >> model->nSV[i]; | |

254 | } | |

255 | else if(strcmp(cmd,"SV")==0) | |

256 | { | |

257 | while(1) | |

258 | { | |

259 | int c = stream.get(); | |

260 | if(stream.eof() || c=='\n') break; | |

261 | } | |

262 | break; | |

263 | } | |

264 | else | |

265 | { | |

266 | fprintf(stderr,"unknown text in model file: [%s]\n",cmd); | |

267 | free(model->rho); | |

268 | free(model->label); | |

269 | free(model->nSV); | |

270 | free(model); | |

271 | return NULL; | |

272 | } | |

273 | } | |

274 | if (stream.fail()){ | |

275 | free(model->rho); | |

276 | free(model->label); | |

277 | free(model->nSV); | |

278 | free(model); | |

279 | return NULL; | |

280 | ||

281 | } | |

282 | ||

283 | // read sv_coef and SV | |

284 | ||

285 | int elements = 0; | |

286 | long pos = stream.tellg(); | |

287 | ||

288 | char *p,*endptr,*idx,*val; | |

289 | string str_line; | |

290 | while (!stream.eof() && !stream.fail()) | |

291 | { | |

292 | getline(stream, str_line); | |

293 | elements += std::count(str_line.begin(), str_line.end(), ':'); | |

294 | } | |

295 | ||

296 | elements += model->l; | |

297 | ||

298 | stream.clear(); | |

299 | stream.seekg(pos, ios::beg); | |

300 | ||

301 | int m = model->nr_class - 1; | |

302 | int l = model->l; | |

303 | model->sv_coef = Malloc(double *,m); | |

304 | int i; | |

305 | for(i=0;i<m;i++) | |

306 | model->sv_coef[i] = Malloc(double,l); | |

307 | model->SV = Malloc(svm_node*,l); | |

308 | svm_node *x_space = NULL; | |

309 | if(l>0) x_space = Malloc(svm_node,elements); | |

310 | ||

311 | int j=0; | |

312 | char *line; | |

313 | for(i=0;i<l;i++) | |

314 | { | |

315 | getline(stream, str_line); | |

316 | if (str_line.size() == 0) | |

317 | continue; | |

318 | ||

319 | line = (char *) Malloc(char, str_line.size() + 1); | |

320 | // Copy the line for strtok. | |

321 | strcpy(line, str_line.c_str()); | |

322 | ||

323 | model->SV[i] = &x_space[j]; | |

324 | ||

325 | p = strtok(line, " \t"); | |

326 | model->sv_coef[0][i] = strtod(p,&endptr); | |

327 | for(int k=1;k<m;k++) | |

328 | { | |

329 | p = strtok(NULL, " \t"); | |

330 | model->sv_coef[k][i] = strtod(p,&endptr); | |

331 | } | |

332 | ||

333 | while(1) | |

334 | { | |

335 | idx = strtok(NULL, ":"); | |

336 | val = strtok(NULL, " \t"); | |

337 | ||

338 | if(val == NULL) | |

339 | break; | |

340 | x_space[j].index = (int) strtol(idx,&endptr,10); | |

341 | x_space[j].value = strtod(val,&endptr); | |

342 | ||

343 | ++j; | |

344 | } | |

345 | x_space[j++].index = -1; | |

346 | free(line); | |

347 | } | |

348 | ||

349 | if (stream.fail()) | |

350 | return NULL; | |

351 | ||

352 | model->free_sv = 1; // XXX | |

353 | return model; | |

354 | } | |

355 | ||

[11650] | 356 | |

[8978] | 357 | svm_model *svm_load_model_alt(std::string& stream) |

358 | { | |

359 | std::istringstream strstream(stream); | |

360 | return svm_load_model_alt(strstream); | |

361 | } | |

362 | ||

[11606] | 363 | |

[11650] | 364 | std::ostream & svm_node_vector_to_stream(std::ostream & stream, const svm_node * node) { |

365 | while (node->index != -1) { | |

366 | stream << node->index << ":" << node->value << " "; | |

367 | node++; | |

368 | } | |

369 | stream << node->index << ":" << node->value; | |

370 | return stream; | |

371 | } | |

372 | ||

373 | std::ostream & operator << (std::ostream & stream, const svm_problem & problem) { | |

374 | svm_node * node = NULL; | |

375 | for (int i = 0; i < problem.l; i++) { | |

376 | stream << problem.y[i] << " "; | |

377 | svm_node_vector_to_stream(stream, problem.x[i]); | |

378 | stream << endl; | |

379 | } | |

380 | return stream; | |

381 | } | |

382 | ||

[11606] | 383 | /* |

384 | * Return a formated string representing a svm data instance (svm_node *) | |

385 | * (useful for debugging) | |

386 | */ | |

[11650] | 387 | std::string svm_node_to_string(const svm_node * node) { |

[11606] | 388 | std::ostringstream strstream; |

389 | strstream.precision(17); | |

[11650] | 390 | svm_node_vector_to_stream(strstream, node); |

[11606] | 391 | return strstream.rdbuf()->str(); |

392 | } | |

393 | ||

394 | ||

#ifdef _MSC_VER
#include <float.h>
// MSVC (pre-C99) lacks isfinite(); _finite() is its equivalent.
#define isfinite _finite
#endif

/*!
 * Check if the value is valid (not a special value in 'TValue').
 */

// A feature value is usable when it is a finite number and is not
// numeric_limits<int>::max(), which Orange uses as a special marker.
inline bool is_valid(double value) {
	return isfinite(value) && value != numeric_limits<int>::max();
}

407 | ||

408 | ||

/*
 * Convert an Orange example to a libSVM sparse vector (svm_node array).
 * Attribute indices are 1-based and only non-zero, valid values are
 * stored (sparse representation). The terminating sentinel node gets
 * index -1 and value 'last' — callers use 'last' to smuggle the
 * example's row index through libSVM (see extract_support_vectors).
 * Returns the position one past the sentinel.
 */
svm_node* example_to_svm(const TExample &ex, svm_node* node, double last=0.0) {
	int index = 1;
	double value = 0.0;
	TExample::iterator values_end;

	// Skip the class value (stored last) when the domain has one.
	if (ex.domain->classVar) {
		values_end = ex.end() - 1;
	} else {
		values_end = ex.end();
	}

	for(TExample::iterator iter = ex.begin(); iter != values_end; iter++, index++) {
		if(iter->isRegular()) {
			if(iter->varType == TValue::FLOATVAR) {
				value = iter->floatV;
			} else if (iter->varType == TValue::INTVAR) {
				value = iter->intV;
			} else {
				// Unsupported value type: skip (index still advances).
				continue;
			}

			// Only add non zero values (speedup due to sparseness)
			if (value != 0 && is_valid(value)) {
				node->index = index;
				node->value = value;
				node++;
			}
		}
	}

	// Sentinel
	node->index = -1;
	node->value = last;
	node++;
	return node;
}

445 | ||

446 | class SVM_NodeSort{ | |

447 | public: | |

[11650] | 448 | bool operator() (const svm_node &lhs, const svm_node &rhs) { |

[8978] | 449 | return lhs.index < rhs.index; |

450 | } | |

451 | }; | |

452 | ||

/*
 * Convert an example with meta attributes to a sparse libSVM vector.
 * When include_regular is true the ordinary attributes are encoded
 * first (indices 1..n_variables) and meta attributes are mapped above
 * them; meta ids are negative, hence the 'index - iter->first' mapping.
 * The nodes are sorted by index before the sentinel is appended,
 * because libSVM expects ascending indices. Returns the position one
 * past the sentinel.
 */
svm_node* example_to_svm_sparse(const TExample &ex, svm_node* node, double last=0.0, bool include_regular=false) {
	svm_node *first = node;
	int index = 1;
	double value;

	if (include_regular) {
		node = example_to_svm(ex, node);
		// Rewind the sentinel
		node--;
		assert(node->index == -1);
		// Meta indices start after the regular attribute range.
		index += ex.domain->variables->size();
	}

	for (TMetaValues::const_iterator iter=ex.meta.begin(); iter!=ex.meta.end(); iter++) {
		if(iter->second.isRegular()) {
			if(iter->second.varType == TValue::FLOATVAR) {
				value = iter->second.floatV;
			} else if (iter->second.varType == TValue::INTVAR) {
				value = iter->second.intV;
			} else {
				continue;
			}

			// Only store non-zero, valid values (sparseness speedup).
			if (value != 0 && is_valid(value)) {
				// add the (- meta_id) to index; meta_ids are negative
				node->index = index - iter->first;
				node->value = value;
				node++;
			}
		}
	}

	// sort the nodes by index (metas are not ordered)
	sort(first, node, SVM_NodeSort());

	// Sentinel
	node->index = -1;
	node->value = last;
	node++;
	return node;
}

494 | ||

495 | /* | |

496 | * Precompute Gram matrix row for ex. | |

497 | * Used for prediction when using the PRECOMPUTED kernel. | |

498 | */ | |

[11650] | 499 | svm_node* example_to_svm_precomputed(const TExample &ex, PExampleGenerator examples, PKernelFunc kernel, svm_node* node) { |

500 | // Required node with index 0 | |

[8978] | 501 | node->index = 0; |

[11606] | 502 | node->value = 0.0; // Can be any value. |

[8978] | 503 | node++; |

504 | int k = 0; | |

505 | PEITERATE(iter, examples){ | |

506 | node->index = ++k; | |

[11650] | 507 | node->value = kernel->operator()(*iter, ex); |

[8978] | 508 | node++; |

509 | } | |

[11650] | 510 | |

511 | // Sentinel | |

512 | node->index = -1; | |

[8978] | 513 | node++; |

514 | return node; | |

515 | } | |

516 | ||

517 | int getNumOfElements(const TExample &ex, bool meta=false, bool useNonMeta=false){ | |

[11650] | 518 | if (!meta) |

519 | return std::max(ex.domain->attributes->size() + 1, 2); | |

520 | else { | |

521 | int count = 1; // we need one to indicate the end of a sequence | |

522 | if (useNonMeta) | |

523 | count += ex.domain->attributes->size(); | |

524 | for (TMetaValues::const_iterator iter=ex.meta.begin(); iter!=ex.meta.end();iter++) | |

525 | if(iter->second.isRegular()) | |

[8978] | 526 | count++; |

527 | return std::max(count,2); | |

528 | } | |

529 | } | |

530 | ||

[11650] | 531 | int getNumOfElements(PExampleGenerator &examples, bool meta=false, bool useNonMeta=false) { |

532 | if (!meta) | |

533 | return getNumOfElements(*(examples->begin()), meta) * examples->numberOfExamples(); | |

534 | else { | |

535 | int count = 0; | |

[8978] | 536 | for(TExampleGenerator::iterator ex(examples->begin()); ex!=examples->end(); ++ex){ |

[11650] | 537 | count += getNumOfElements(*ex, meta, useNonMeta); |

[8978] | 538 | } |

539 | return count; | |

540 | } | |

541 | } | |

542 | ||

/*
 * Build a precomputed-kernel svm_problem: the Gram matrix of 'examples'
 * under 'kernel'. Each row i is encoded as a svm_node vector whose node
 * 0 holds the 1-based instance index and nodes 1..n hold K(i, j).
 * Returns the Malloc'ed x_space; the caller owns problem.x, problem.y
 * and the returned buffer.
 */
svm_node* init_precomputed_problem(svm_problem &problem, PExampleTable examples, TKernelFunc &kernel){
	int n_examples = examples->numberOfExamples();
	int i,j;
	// The kernel is symmetric: evaluate only the lower triangle.
	PSymMatrix matrix = mlnew TSymMatrix(n_examples, 0.0);
	for (i = 0; i < n_examples; i++)
		for (j = 0; j <= i; j++){
			matrix->getref(i, j) = kernel(examples->at(i), examples->at(j));
		}
	// Per row: one index node + n_examples kernel values + sentinel.
	svm_node *x_space = Malloc(svm_node, n_examples * (n_examples + 2));
	svm_node *node = x_space;

	problem.l = n_examples;
	problem.x = Malloc(svm_node*, n_examples);
	problem.y = Malloc(double, n_examples);

	for (i = 0; i < n_examples; i++){
		problem.x[i] = node;
		if (examples->domain->classVar->varType == TValue::FLOATVAR)
			problem.y[i] = examples->at(i).getClass().floatV;
		else
			problem.y[i] = examples->at(i).getClass().intV;

		node->index = 0;
		node->value = i + 1; // instance indices are 1 based
		node++;
		for (j = 0; j < n_examples; j++){
			node->index = j + 1;
			node->value = matrix->getitem(i, j);
			node++;
		}
		node->index = -1; // sentry
		node++;
	}
	return x_space;
}

578 | ||

/*
 * Extract an ExampleTable corresponding to the support vectors from the
 * trained model. Relies on the instance-index protocol used when the
 * problem was built (see example_to_svm / init_precomputed_problem).
 */
PExampleTable extract_support_vectors(svm_model * model, PExampleTable train_instances)
{
	PExampleTable vectors = mlnew TExampleTable(train_instances->domain);

	for (int i = 0; i < model->l; i++) {
		svm_node *node = model->SV[i];
		int sv_index = -1;
		if(model->param.kernel_type != PRECOMPUTED){
			/* The value of the last node (with index == -1) holds the
			 * index of the training example.
			 */
			while(node->index != -1) {
				node++;
			}
			sv_index = int(node->value);
		} else {
			/* The value of the first node contains the training instance
			 * index (indices 1 based).
			 */
			sv_index = int(node->value) - 1;
		}
		// Copy the referenced training instance into the result table.
		vectors->addExample(mlnew TExample(train_instances->at(sv_index)));
	}

	return vectors;
}

609 | ||

610 | ||

/*
 * Consolidate model->SV[1] .. SV[l] vectors into a single contiguous
 * memory block. The model will 'own' the new *(model->SV) array and
 * will be freed in destroy_svm_model (model->free_sv == 1). Note that
 * the original 'x_space' is left intact, it is the caller's
 * responsibility to free it. However the model->SV array itself is
 * reused (overwritten).
 */

void svm_model_consolidate_SV(svm_model * model) {
	int count = 0;
	svm_node * x_space = NULL;
	svm_node * ptr = NULL;
	svm_node * ptr_source = NULL;

	// Count the number of elements.
	for (int i = 0; i < model->l; i++) {
		ptr = model->SV[i];
		while (ptr->index != -1){
			count++;
			ptr++;
		}
	}
	// add the sentinel count
	count += model->l;

	// Second pass: copy each vector (including its sentinel) into the
	// new contiguous block and repoint model->SV at the copies.
	x_space = Malloc(svm_node, count);
	ptr = x_space;
	for (int i = 0; i < model->l; i++) {
		ptr_source = model->SV[i];
		model->SV[i] = ptr;
		while (ptr_source->index != -1) {
			*(ptr++) = *(ptr_source++);
		}
		// copy the sentinel
		*(ptr++) = *(ptr_source++);
	}
	model->free_sv = 1; // XXX
}

650 | ||

// No-op print handler; installed via svm_set_print_string_function to
// silence libSVM's training chatter when 'verbose' is off.
static void print_string_null(const char* /*s*/) { /* intentionally empty */ }

652 | ||

[11606] | 653 | |

// Default learner settings. These largely mirror libSVM's 'svm-train'
// defaults, except svm_type which defaults to NU_SVC here.
TSVMLearner::TSVMLearner(){
	svm_type = NU_SVC;
	kernel_type = RBF;
	degree = 3;        // polynomial kernel degree
	gamma = 0;         // 0 means: derive from data in operator()
	coef0 = 0;         // poly/sigmoid kernel offset
	nu = 0.5;          // nu-SVC / nu-SVR / one-class parameter
	cache_size = 250;  // kernel cache size, in MB
	C = 1;             // soft-margin cost
	eps = 1e-3f;       // optimizer stopping tolerance
	p = 0.1f;          // epsilon-SVR tube width
	shrinking = 1;     // use shrinking heuristics
	probability = 0;   // probability estimates off by default
	verbose = false;   // suppress libSVM's console output
	nr_weight = 0;     // no per-class weights
	weight_label = NULL;
	weight = NULL;
};

672 | ||

[11606] | 673 | |

/*
 * Train a libSVM model on 'examples' and wrap it in a TSVMClassifier.
 * Raises an error for: a missing class variable (unless one-class), a
 * continuous class with a classification svm_type, a missing custom
 * kernel for PRECOMPUTED, invalid libSVM parameters, or a degenerate
 * model with no support vectors.
 */
PClassifier TSVMLearner::operator ()(PExampleGenerator examples, const int&){
	svm_parameter param;
	svm_problem prob;
	svm_model* model;
	svm_node* x_space;

	PDomain domain = examples->domain;

	// Validate the class variable against the requested svm_type.
	int classVarType;
	if (domain->classVar)
		classVarType = domain->classVar->varType;
	else {
		classVarType = TValue::NONE;
		if(svm_type != ONE_CLASS)
			raiseError("Domain has no class variable");
	}
	if (classVarType == TValue::FLOATVAR && !(svm_type == EPSILON_SVR || svm_type == NU_SVR ||svm_type == ONE_CLASS))
		raiseError("Domain has continuous class");

	if (kernel_type == PRECOMPUTED && !kernelFunc)
		raiseError("Custom kernel function not supplied");

	PExampleTable train_data = mlnew TExampleTable(examples, /* owns= */ false);

	if (classVarType == TValue::INTVAR && svm_type != ONE_CLASS) {
		/* Sort the train data by the class columns so the order of
		 * classVar.values is preserved in libsvm's model.
		 */
		vector<int> sort_columns(domain->variables->size() - 1);
		train_data->sort(sort_columns);
	}

	// Initialize svm parameters
	param.svm_type = svm_type;
	param.kernel_type = kernel_type;
	param.degree = degree;
	param.gamma = gamma;
	param.coef0 = coef0;
	param.nu = nu;
	param.C = C;
	param.eps = eps;
	param.p = p;
	param.cache_size = cache_size;
	param.shrinking = shrinking;
	param.probability = probability;
	param.nr_weight = nr_weight;

	// Copy class weights; svm_destroy_param frees these copies later.
	if (nr_weight > 0) {
		param.weight_label = Malloc(int, nr_weight);
		param.weight = Malloc(double, nr_weight);
		int i;
		for (i=0; i<nr_weight; i++) {
			param.weight_label[i] = weight_label[i];
			param.weight[i] = weight[i];
		}
	} else {
		param.weight_label = NULL;
		param.weight = NULL;
	}

	int numElements = getNumOfElements(train_data);

	prob.x = NULL;
	prob.y = NULL;

	if (kernel_type != PRECOMPUTED)
		x_space = init_problem(prob, train_data, numElements);
	else // Compute the matrix using the kernelFunc
		x_space = init_precomputed_problem(prob, train_data, kernelFunc.getReference());

	// Default gamma ~ 1 / (average number of nodes per example - 1).
	if (param.gamma == 0)
		param.gamma = 1.0f / (float(numElements) / float(prob.l) - 1);

	const char* error = svm_check_parameter(&prob, &param);
	if (error){
		free(x_space);
		free(prob.y);
		free(prob.x);
		svm_destroy_param(&param);
		raiseError("LibSVM parameter error: %s", error);
	}

	// If a probability model was requested LibSVM uses 5 fold
	// cross-validation to estimate the prediction errors. This includes a
	// random shuffle of the data. To make the results reproducible and
	// consistent with 'svm-train' (which always learns just on one dataset
	// in a process run) we reset the random seed. This could have unintended
	// consequences.
	if (param.probability)
	{
		srand(1);
	}
	svm_set_print_string_function((verbose)? NULL : &print_string_null);

	model = svm_train(&prob, &param);

	if ((svm_type==C_SVC || svm_type==NU_SVC) && !model->nSV) {
		svm_free_and_destroy_model(&model);
		free(x_space);
		free(prob.x);
		free(prob.y);
		svm_destroy_param(&param);
		raiseError("LibSVM returned no support vectors");
	}

	svm_destroy_param(&param);
	free(prob.y);
	free(prob.x);

	// Consolidate the SV so x_space can be safely freed
	svm_model_consolidate_SV(model);

	free(x_space);

	PExampleTable supportVectors = extract_support_vectors(model, train_data);

	return PClassifier(createClassifier(domain, model, supportVectors, train_data));
}

792 | ||

[11650] | 793 | svm_node* TSVMLearner::example_to_svm(const TExample &ex, svm_node* node, double last){ |

794 | return ::example_to_svm(ex, node, last); | |

[8978] | 795 | } |

796 | ||

/*
 * Allocate and fill a dense svm_problem from 'examples'. Each example's
 * row index is stored in its sentinel node (passing 'last' == i) so the
 * support vectors can later be traced back to training instances.
 * Returns the Malloc'ed x_space; the caller owns problem.x, problem.y
 * and the returned buffer.
 */
svm_node* TSVMLearner::init_problem(svm_problem &problem, PExampleTable examples, int n_elements){
	problem.l = examples->numberOfExamples();
	problem.y = Malloc(double, problem.l);
	problem.x = Malloc(svm_node*, problem.l);
	svm_node *x_space = Malloc(svm_node, n_elements);
	svm_node *node = x_space;

	for (int i = 0; i < problem.l; i++){
		problem.x[i] = node;
		// Pass i as 'last' so the sentinel carries the row index.
		node = example_to_svm(examples->at(i), node, i);
		// NOTE(review): when there is no class variable (one-class SVM),
		// problem.y[i] is left unset; libSVM ignores y for one-class.
		if (examples->domain->classVar)
			if (examples->domain->classVar->varType == TValue::FLOATVAR)
				problem.y[i] = examples->at(i).getClass().floatV;
			else if (examples->domain->classVar->varType == TValue::INTVAR)
				problem.y[i] = examples->at(i).getClass().intV;
	}

	// cout << problem << endl;

	return x_space;
}

818 | ||

[8978] | 819 | int TSVMLearner::getNumOfElements(PExampleGenerator examples){ |

820 | return ::getNumOfElements(examples); | |

821 | } | |

822 | ||

[11606] | 823 | TSVMClassifier* TSVMLearner::createClassifier( |

[11607] | 824 | PDomain domain, svm_model* model, PExampleTable supportVectors, PExampleTable examples) { |

[11608] | 825 | PKernelFunc kfunc; |

[11606] | 826 | if (kernel_type != PRECOMPUTED) { |

[11608] | 827 | // Classifier does not need the train data and the kernelFunc. |

[11606] | 828 | examples = NULL; |

[11608] | 829 | kfunc = NULL; |

830 | } else { | |

831 | kfunc = kernelFunc; | |

[11606] | 832 | } |

[11608] | 833 | |

834 | return mlnew TSVMClassifier(domain, model, supportVectors, kfunc, examples); | |

[8978] | 835 | } |

836 | ||

837 | TSVMLearner::~TSVMLearner(){ | |

838 | if(weight_label) | |

839 | free(weight_label); | |

840 | ||

841 | if(weight) | |

[11608] | 842 | free(weight); |

[8978] | 843 | } |

844 | ||

[11650] | 845 | svm_node* TSVMLearnerSparse::example_to_svm(const TExample &ex, svm_node* node, double last){ |

[8978] | 846 | return ::example_to_svm_sparse(ex, node, last, useNonMeta); |

847 | } | |

848 | ||

849 | int TSVMLearnerSparse::getNumOfElements(PExampleGenerator examples){ | |

850 | return ::getNumOfElements(examples, true, useNonMeta); | |

851 | } | |

852 | ||

[11606] | 853 | TSVMClassifier* TSVMLearnerSparse::createClassifier( |

[11607] | 854 | PDomain domain, svm_model* model, PExampleTable supportVectors, PExampleTable examples) { |

[11608] | 855 | PKernelFunc kfunc; |

[11606] | 856 | if (kernel_type != PRECOMPUTED) { |

[11608] | 857 | // Classifier does not need the train data and the kernelFunc. |

[11606] | 858 | examples = NULL; |

[11608] | 859 | kfunc = NULL; |

860 | } else { | |

861 | kfunc = kernelFunc; | |

[11606] | 862 | } |

[11608] | 863 | return mlnew TSVMClassifierSparse(domain, model, useNonMeta, supportVectors, kfunc, examples); |

[8978] | 864 | } |

865 | ||

[11606] | 866 | |

/*
 * Wrap a trained libSVM 'model'. Copies the model's per-class data
 * (nSV, sv_coef, rho, probA/probB) into Orange list types so they can
 * be inspected from the public class interface. For PRECOMPUTED kernels
 * 'examples' and 'kernelFunc' are kept for prediction; otherwise they
 * are NULL (see createClassifier).
 */
TSVMClassifier::TSVMClassifier(
		PDomain domain, svm_model * model,
		PExampleTable supportVectors,
		PKernelFunc kernelFunc,
		PExampleTable examples
		) : TClassifierFD(domain) {
	this->model = model;
	this->supportVectors = supportVectors;
	this->kernelFunc = kernelFunc;
	this->examples = examples;

	svm_type = svm_get_svm_type(model);
	kernel_type = model->param.kernel_type;

	if (svm_type == ONE_CLASS) {
		// One-class SVM has no class variable; expose a synthetic one.
		this->classVar = mlnew TFloatVariable("one class");
	}

	computesProbabilities = model && svm_check_probability_model(model) && \
			(svm_type != NU_SVR && svm_type != EPSILON_SVR); // Disable prob. estimation for regression

	int nr_class = svm_get_nr_class(model);
	int i = 0;

	/* Expose (copy) the model data (coef, rho, probA) to public
	 * class interface.
	 */
	if (svm_type == C_SVC || svm_type == NU_SVC) {
		nSV = mlnew TIntList(nr_class); // num of SVs for each class (sum(nSV) == model->l)
		for(i = 0;i < nr_class; i++) {
			nSV->at(i) = model->nSV[i];
		}
	}

	// nr_class-1 coefficient rows, one value per support vector.
	coef = mlnew TFloatListList(nr_class-1);
	for(i = 0; i < nr_class - 1; i++) {
		TFloatList *coefs = mlnew TFloatList(model->l);
		for(int j = 0;j < model->l; j++) {
			coefs->at(j) = model->sv_coef[i][j];
		}
		coef->at(i) = coefs;
	}

	// Number of binary classifiers in the model
	int nr_bin_cls = nr_class * (nr_class - 1) / 2;

	rho = mlnew TFloatList(nr_bin_cls);
	for(i = 0; i < nr_bin_cls; i++) {
		rho->at(i) = model->rho[i];
	}

	if(model->probA) {
		probA = mlnew TFloatList(nr_bin_cls);
		if (model->param.svm_type != NU_SVR && model->param.svm_type != EPSILON_SVR && model->probB) {
			// Regression only has probA
			probB = mlnew TFloatList(nr_bin_cls);
		}

		for(i=0; i<nr_bin_cls; i++) {
			probA->at(i) = model->probA[i];
			if (model->param.svm_type != NU_SVR && model->param.svm_type != EPSILON_SVR && model->probB) {
				probB->at(i) = model->probB[i];
			}
		}
	}
}

933 | ||

[11607] | 934 | |

[8978] | 935 | TSVMClassifier::~TSVMClassifier(){ |

[11606] | 936 | if (model) { |

[8978] | 937 | svm_free_and_destroy_model(&model); |

[11606] | 938 | } |

[8978] | 939 | } |

940 | ||

[11607] | 941 | |

[8978] | 942 | PDistribution TSVMClassifier::classDistribution(const TExample & example){ |

943 | if(!model) | |

944 | raiseError("No Model"); | |

945 | ||

946 | if(!computesProbabilities) | |

947 | return TClassifierFD::classDistribution(example); | |

948 | ||

949 | int n_elements; | |

950 | if (model->param.kernel_type != PRECOMPUTED) | |

951 | n_elements = getNumOfElements(example); | |

952 | else | |

953 | n_elements = examples->numberOfExamples() + 2; | |

954 | ||

955 | int svm_type = svm_get_svm_type(model); | |

956 | int nr_class = svm_get_nr_class(model); | |

957 | ||

958 | svm_node *x = Malloc(svm_node, n_elements); | |

959 | try{ | |

960 | if (model->param.kernel_type != PRECOMPUTED) | |

961 | example_to_svm(example, x, -1.0); | |

962 | else | |

963 | example_to_svm_precomputed(example, examples, kernelFunc, x); | |

964 | } catch (...) { | |

965 | free(x); | |

966 | throw; | |

967 | } | |

968 | ||

969 | int *labels=(int *) malloc(nr_class*sizeof(int)); | |

970 | svm_get_labels(model, labels); | |

971 | ||

972 | double *prob_estimates = (double *) malloc(nr_class*sizeof(double));; | |

973 | svm_predict_probability(model, x, prob_estimates); | |

974 | ||

975 | PDistribution dist = TDistribution::create(example.domain->classVar); | |

976 | for(int i=0; i<nr_class; i++) | |

977 | dist->setint(labels[i], prob_estimates[i]); | |

978 | free(x); | |

979 | free(prob_estimates); | |

980 | free(labels); | |

981 | return dist; | |

982 | } | |

983 | ||

984 | TValue TSVMClassifier::operator()(const TExample & example){ | |

985 | if(!model) | |

986 | raiseError("No Model"); | |

987 | ||

988 | int n_elements; | |

989 | if (model->param.kernel_type != PRECOMPUTED) | |

[11606] | 990 | n_elements = getNumOfElements(example); |

[8978] | 991 | else |

992 | n_elements = examples->numberOfExamples() + 2; | |

993 | ||

994 | int svm_type = svm_get_svm_type(model); | |

995 | int nr_class = svm_get_nr_class(model); | |

996 | ||

997 | svm_node *x = Malloc(svm_node, n_elements); | |

998 | try { | |

999 | if (model->param.kernel_type != PRECOMPUTED) | |

1000 | example_to_svm(example, x); | |

1001 | else | |

1002 | example_to_svm_precomputed(example, examples, kernelFunc, x); | |

1003 | } catch (...) { | |

1004 | free(x); | |

1005 | throw; | |

1006 | } | |

1007 | ||

1008 | double v; | |

1009 | ||

1010 | if(svm_check_probability_model(model)){ | |

1011 | double *prob = (double *) malloc(nr_class*sizeof(double)); | |

1012 | v = svm_predict_probability(model, x, prob); | |

1013 | free(prob); | |

1014 | } else | |

1015 | v = svm_predict(model, x); | |

1016 | ||

1017 | free(x); | |

1018 | if(svm_type==NU_SVR || svm_type==EPSILON_SVR || svm_type==ONE_CLASS) | |

1019 | return TValue(v); | |

1020 | else | |

1021 | return TValue(int(v)); | |

1022 | } | |

1023 | ||

1024 | PFloatList TSVMClassifier::getDecisionValues(const TExample &example){ | |

1025 | if(!model) | |

1026 | raiseError("No Model"); | |

1027 | ||

1028 | int n_elements; | |

[11606] | 1029 | if (model->param.kernel_type != PRECOMPUTED) |

1030 | n_elements = getNumOfElements(example); | |

1031 | else | |

1032 | n_elements = examples->numberOfExamples() + 2; | |

[8978] | 1033 | |

1034 | int svm_type=svm_get_svm_type(model); | |

1035 | int nr_class=svm_get_nr_class(model); | |

1036 | ||

1037 | svm_node *x = Malloc(svm_node, n_elements); | |

1038 | try { | |

1039 | if (model->param.kernel_type != PRECOMPUTED) | |

1040 | example_to_svm(example, x); | |

1041 | else | |

1042 | example_to_svm_precomputed(example, examples, kernelFunc, x); | |

1043 | } catch (...) { | |

1044 | free(x); | |

1045 | throw; | |

1046 | } | |

1047 | ||

1048 | int nDecValues = nr_class*(nr_class-1)/2; | |

1049 | double *dec = (double*) malloc(sizeof(double)*nDecValues); | |

1050 | svm_predict_values(model, x, dec); | |

1051 | PFloatList res = mlnew TFloatList(nDecValues); | |

1052 | for(int i=0; i<nDecValues; i++){ | |

1053 | res->at(i) = dec[i]; | |

1054 | } | |

1055 | free(x); | |

1056 | free(dec); | |

1057 | return res; | |

1058 | } | |

1059 | ||

[11650] | 1060 | svm_node *TSVMClassifier::example_to_svm(const TExample &ex, svm_node *node, double last){ |

1061 | return ::example_to_svm(ex, node, last); | |

[8978] | 1062 | } |

1063 | ||

1064 | int TSVMClassifier::getNumOfElements(const TExample& example){ | |

1065 | return ::getNumOfElements(example); | |

1066 | } | |

[11650] | 1067 | svm_node *TSVMClassifierSparse::example_to_svm(const TExample &ex, svm_node *node, double last){ |

[8978] | 1068 | return ::example_to_svm_sparse(ex, node, last, useNonMeta); |

1069 | } | |

1070 | ||

1071 | int TSVMClassifierSparse::getNumOfElements(const TExample& example){ | |

1072 | return ::getNumOfElements(example, true, useNonMeta); | |

1073 | } | |

1074 | ||

1075 | ||

[11650] | 1076 | #include "libsvm_interface.ppp" |

**Note:**See TracBrowser for help on using the repository browser.