1diff -urb svm_light/Makefile svm_light-new/Makefile
2--- svm_light/Makefile	2008-10-08 14:38:53.000000000 -0500
3+++ svm_light-new/Makefile	2008-11-18 14:06:33.000000000 -0600
4@@ -16,17 +16,25 @@
5 #CFLAGS= $(SFLAGS) -pg -Wall -pedantic      # debugging C-Compiler flags
6 #LFLAGS= $(SFLAGS) -pg                      # debugging linker flags
7 LIBS=-L. -lm                               # used libraries
8+RANLIB=ranlib
9
10-all: svm_learn_hideo svm_classify
11+OPTIM=hideo
12+
13+all: svm_learn svm_classify libsvmlight.a libsvmlight.so
14+
15+libsvmlight.a: svm_learn.o svm_common.o svm_$(OPTIM).o
16+	$(AR) r $@ $^
17+	$(RANLIB) $@
18
19 tidy:
20-	rm -f *.o
21-	rm -f pr_loqo/*.o
22+	$(RM) *.o
23+	$(RM) pr_loqo/*.o
24
25 clean:	tidy
26-	rm -f svm_learn
27-	rm -f svm_classify
28-	rm -f libsvmlight.so
29+	$(RM) svm_learn
30+	$(RM) svm_classify
31+	$(RM) libsvmlight.so
32+	$(RM) libsvmlight.a
33
34 help:   info
35
36@@ -34,72 +42,63 @@
37 	@echo
38 	@echo "make for SVM-light               Thorsten Joachims, 1998"
39 	@echo
40-	@echo "Thanks to Ralf Herbrich for the initial version."
41+	@echo "Thanks to Ralf Herbrich for the initial version of the Makefile."
42 	@echo
43-	@echo "USAGE: make [svm_learn | svm_learn_loqo | svm_learn_hideo | "
44-	@echo "             libsvmlight_hideo | libsvmlight_loqo | "
45-	@echo "             svm_classify | all | clean | tidy]"
46-	@echo
47-	@echo "    svm_learn           builds the learning module (prefers HIDEO)"
48-	@echo "    svm_learn_hideo     builds the learning module using HIDEO optimizer"
49-	@echo "    svm_learn_loqo      builds the learning module using PR_LOQO optimizer"
50+	@echo "USAGE: make [svm_learn | svm_classify | libsvmlight.so | libsvmlight.a |"
51+	@echo "             all | clean | tidy] [OPTIM=loqo]"
52+	@echo
53+	@echo "    svm_learn           builds the learning module"
54 	@echo "    svm_classify        builds the classfication module"
55-	@echo "    libsvmlight_hideo   builds shared object library that can be linked into"
56-	@echo "                        other code using HIDEO"
57-	@echo "    libsvmlight_loqo    builds shared object library that can be linked into"
58-	@echo "                        other code using PR_LOQO"
59+	@echo "    libsvmlight.so      builds shared object library that can be linked into"
60+	@echo "                        other code"
61+	@echo "    libsvmlight.a       builds object library that can be linked into"
62+	@echo "                        other code"
63 	@echo "    all (default)       builds svm_learn + svm_classify"
64 	@echo "    clean               removes .o and target files"
65 	@echo "    tidy                removes .o files"
66 	@echo
67+	@echo "    OPTIM=loqo          use the PR_LOQO optimizer (default is HIDEO)"
68+	@echo
69
70 # Create executables svm_learn and svm_classify
71
72-svm_learn_hideo: svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o
73-	$(LD) $(LFLAGS) svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o -o svm_learn $(LIBS)
74+ifeq ($(OPTIM),loqo)
75+  loqoobj=pr_loqo/pr_loqo.o
76+endif
77
78-#svm_learn_loqo: svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o loqo
79-#	$(LD) $(LFLAGS) svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o pr_loqo/pr_loqo.o -o svm_learn $(LIBS)
80+svm_learn: svm_learn_main.o svm_learn.o svm_common.o svm_$(OPTIM).o $(loqoobj)
81+	$(LD) $(LFLAGS) $^ -o $@ $(LIBS)
82
83 svm_classify: svm_classify.o svm_common.o
84-	$(LD) $(LFLAGS) svm_classify.o svm_common.o -o svm_classify $(LIBS)
85+	$(LD) $(LFLAGS) $^ -o $@ $(LIBS)
86
87
88 # Create library libsvmlight.so, so that external code can get access to the
89 # learning and classification functions of svm-light by linking this library.
90
91-svm_learn_hideo_noexe: svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o
92+svm_learn_noexe: svm_learn_main.o svm_learn.o svm_common.o svm_$(OPTIM).o $(loqoobj)
93
94-libsvmlight_hideo: svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o
95-	$(LD) -shared svm_learn.o svm_common.o svm_hideo.o -o libsvmlight.so
96+libsvmlight.so: svm_learn.o svm_common.o svm_$(OPTIM).o $(loqoobj)
97+	$(LD) -shared $^ -o $@
98
99-#svm_learn_loqo_noexe: svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o loqo
100-
101-#libsvmlight_loqo: svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o
102-#	$(LD) -shared svm_learn.o svm_common.o svm_loqo.o  pr_loqo/pr_loqo.o -o libsvmlight.so
103
104 # Compile components
105
106-svm_hideo.o: svm_hideo.c
107-	$(CC) -c $(CFLAGS) svm_hideo.c -o svm_hideo.o
108-
109-#svm_loqo.o: svm_loqo.c
110-#	$(CC) -c $(CFLAGS) svm_loqo.c -o svm_loqo.o
111+svm_$(OPTIM).o: svm_$(OPTIM).c
112+	$(CC) -c $(CFLAGS) $< -o $@
113
114 svm_common.o: svm_common.c svm_common.h kernel.h
115-	$(CC) -c $(CFLAGS) svm_common.c -o svm_common.o
116+	$(CC) -c $(CFLAGS) $< -o $@
117
118 svm_learn.o: svm_learn.c svm_common.h
119-	$(CC) -c $(CFLAGS) svm_learn.c -o svm_learn.o
120+	$(CC) -c $(CFLAGS) $< -o $@
121
122 svm_learn_main.o: svm_learn_main.c svm_learn.h svm_common.h
123-	$(CC) -c $(CFLAGS) svm_learn_main.c -o svm_learn_main.o
124+	$(CC) -c $(CFLAGS) $< -o $@
125
126 svm_classify.o: svm_classify.c svm_common.h kernel.h
127-	$(CC) -c $(CFLAGS) svm_classify.c -o svm_classify.o
128-
129-#loqo: pr_loqo/pr_loqo.o
130+	$(CC) -c $(CFLAGS) $< -o $@
131
132-#pr_loqo/pr_loqo.o: pr_loqo/pr_loqo.c
133-#	$(CC) -c $(CFLAGS) pr_loqo/pr_loqo.c -o pr_loqo/pr_loqo.o
134+pr_loqo/pr_loqo.o: pr_loqo/pr_loqo.c
135+	$(CC) -c $(CFLAGS) $< -o $@
136
137diff -urb svm_light/svm_classify.c svm_light-new/svm_classify.c
138--- svm_light/svm_classify.c	2008-10-08 14:05:54.000000000 -0500
139+++ svm_light-new/svm_classify.c	2008-11-18 14:06:33.000000000 -0600
140@@ -78,19 +78,20 @@
141 	if((words[j]).wnum>model->totwords) /* are not larger than in     */
142 	  (words[j]).wnum=0;               /* model. Remove feature if   */
143       }                                        /* necessary.                 */
144+    }
145       doc = create_example(-1,0,0,0.0,create_svector(words,comment,1.0));
146       t1=get_runtime();
147+
148+    if(model->kernel_parm.kernel_type == 0) {   /* linear kernel */
149       dist=classify_example_linear(model,doc);
150-      runtime+=(get_runtime()-t1);
151-      free_example(doc,1);
152     }
153     else {                             /* non-linear kernel */
154-      doc = create_example(-1,0,0,0.0,create_svector(words,comment,1.0));
155-      t1=get_runtime();
156       dist=classify_example(model,doc);
157+    }
158+
159       runtime+=(get_runtime()-t1);
160       free_example(doc,1);
161-    }
162+
163     if(dist>0) {
164       if(pred_format==0) { /* old weired output format */
165 	fprintf(predfl,"%.8g:+1 %.8g:-1\n",dist,-dist);
166@@ -183,7 +184,7 @@
167
168 void print_help(void)
169 {
170-  printf("\nSVM-light %s: Support Vector Machine, classification module     %s\n",VERSION,VERSION_DATE);
171+  printf("\nSVM-light %s: Support Vector Machine, classification module     %s\n",SVMLIGHT_VERSION,SVMLIGHT_VERSION_DATE);
172   copyright_notice();
173   printf("   usage: svm_classify [options] example_file model_file output_file\n\n");
174   printf("options: -h         -> this help\n");
175diff -urb svm_light/svm_common.c svm_light-new/svm_common.c
176--- svm_light/svm_common.c	2008-10-08 16:00:35.000000000 -0500
177+++ svm_light-new/svm_common.c	2008-11-18 14:06:33.000000000 -0600
178@@ -527,7 +527,7 @@
179   }
180   if ((modelfl = fopen (modelfile, "w")) == NULL)
181   { perror (modelfile); exit (1); }
182-  fprintf(modelfl,"SVM-light Version %s\n",VERSION);
183+  fprintf(modelfl,"SVM-light Version %s\n",SVMLIGHT_VERSION);
184   fprintf(modelfl,"%ld # kernel type\n",
185 	  model->kernel_parm.kernel_type);
186   fprintf(modelfl,"%ld # kernel parameter -d \n",
187@@ -598,7 +598,7 @@
188   { perror (modelfile); exit (1); }
189
190   fscanf(modelfl,"SVM-light Version %s\n",version_buffer);
191-  if(strcmp(version_buffer,VERSION)) {
192+  if(strcmp(version_buffer,SVMLIGHT_VERSION)) {
193     perror ("Version of model-file does not match version of svm_classify!");
194     exit (1);
195   }
196@@ -887,6 +887,103 @@
197   return(alpha);
198 }
199
200+double costfunc(DOC **docs, double *rankvalue, long i, long j, LEARN_PARM *custom) {
201+  return (docs[i]->costfactor+docs[j]->costfactor)/2.0;
202+}
203+
204+void set_learning_defaults(LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm)
205+{
206+  learn_parm->type=CLASSIFICATION;
207+  strcpy (learn_parm->predfile, "trans_predictions");
208+  strcpy (learn_parm->alphafile, "");
209+  learn_parm->biased_hyperplane=1;
210+  learn_parm->sharedslack=0;
211+  learn_parm->remove_inconsistent=0;
212+  learn_parm->skip_final_opt_check=0;
213+  learn_parm->svm_maxqpsize=10;
214+  learn_parm->svm_newvarsinqp=0;
215+  learn_parm->svm_iter_to_shrink=2;
216+  learn_parm->maxiter=100000;
217+  learn_parm->kernel_cache_size=40;
218+  learn_parm->svm_c=0.0;
219+  learn_parm->eps=0.1;
220+  learn_parm->transduction_posratio=-1.0;
221+  learn_parm->svm_costratio=1.0;
222+  learn_parm->svm_costratio_unlab=1.0;
223+  learn_parm->svm_unlabbound=1E-5;
224+  learn_parm->epsilon_crit=0.001;
225+  learn_parm->epsilon_a=1E-15;
226+  learn_parm->compute_loo=0;
227+  learn_parm->rho=1.0;
228+  learn_parm->xa_depth=0;
229+  learn_parm->costfunc=&costfunc;
230+  learn_parm->costfunccustom=NULL;
231+
232+  kernel_parm->kernel_type=LINEAR;
233+  kernel_parm->poly_degree=3;
234+  kernel_parm->rbf_gamma=1.0;
235+  kernel_parm->coef_lin=1;
236+  kernel_parm->coef_const=1;
237+  strcpy(kernel_parm->custom,"empty");
238+}
239+
240+int check_learning_parms(LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm)
241+{
242+  if((learn_parm->skip_final_opt_check)
243+     && (kernel_parm->kernel_type == LINEAR)) {
244+    printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
245+    learn_parm->skip_final_opt_check=0;
246+  }
247+  if((learn_parm->skip_final_opt_check)
248+     && (learn_parm->remove_inconsistent)) {
249+    printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
250+    return 0;
251+  }
252+  if((learn_parm->svm_maxqpsize<2)) {
253+    printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize);
254+    return 0;
255+  }
256+  if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
257+    printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize);
258+    printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp);
259+    return 0;
260+  }
261+  if(learn_parm->svm_iter_to_shrink<1) {
262+    printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
263+    return 0;
264+  }
265+  if(learn_parm->svm_c<0) {
266+    printf("\nThe C parameter must be greater than zero!\n\n");
267+    return 0;
268+  }
269+  if(learn_parm->transduction_posratio>1) {
270+    printf("\nThe fraction of unlabeled examples to classify as positives must\n");
271+    printf("be less than 1.0 !!!\n\n");
272+    return 0;
273+  }
274+  if(learn_parm->svm_costratio<=0) {
275+    printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
276+    return 0;
277+  }
278+  if(learn_parm->epsilon_crit<=0) {
279+    printf("\nThe epsilon parameter must be greater than zero!\n\n");
280+    return 0;
281+  }
282+  if(learn_parm->rho<0) {
283+    printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
284+    printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
285+    printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
286+    return 0;
287+  }
288+  if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
289+    printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
290+    printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
291+    printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
292+    return 0;
293+  }
294+  return 1;
295+}
296+
297 void nol_ll(char *file, long int *nol, long int *wol, long int *ll)
298      /* Grep through file and count number of lines, maximum number of
299         spaces per line, and longest line. */
300diff -urb svm_light/svm_common.h svm_light-new/svm_common.h
301--- svm_light/svm_common.h	2008-10-08 15:34:58.000000000 -0500
302+++ svm_light-new/svm_common.h	2008-11-18 14:06:33.000000000 -0600
303@@ -27,8 +27,8 @@
304 # include <time.h>
305 # include <float.h>
306
307-# define VERSION       "V6.02"
308-# define VERSION_DATE  "14.08.08"
309+# define SVMLIGHT_VERSION       "V6.02"
310+# define SVMLIGHT_VERSION_DATE  "14.08.08"
311
312 # define CFLOAT  float       /* the type of float to use for caching */
313                              /* kernel evaluations. Using float saves */
314@@ -179,6 +179,8 @@
315   double svm_unlabbound;
316   double *svm_cost;            /* individual upper bounds for each var */
317   long   totwords;             /* number of features */
318+  double (*costfunc)(DOC **, double *, long, long, struct learn_parm *);
319+  void   *costfunccustom;
320 } LEARN_PARM;
321
322 typedef struct kernel_parm {
323@@ -284,6 +286,8 @@
324 void   read_documents(char *, DOC ***, double **, long *, long *);
325 int    parse_document(char *, WORD *, double *, long *, long *, double *, long *, long, char **);
326 double *read_alphas(char *,long);
327+void   set_learning_defaults(LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm);
328+int    check_learning_parms(LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm);
329 void   nol_ll(char *, long *, long *, long *);
330 long   minl(long, long);
331 long   maxl(long, long);
332diff -urb svm_light/svm_learn.c svm_light-new/svm_learn.c
333--- svm_light/svm_learn.c	2004-08-27 16:56:21.000000000 -0500
334+++ svm_light-new/svm_learn.c	2008-11-18 14:06:33.000000000 -0600
335@@ -741,7 +741,8 @@
336   for(i=0;i<totdoc;i++) {
337     for(j=i+1;j<totdoc;j++) {
338       if(docs[i]->queryid == docs[j]->queryid) {
339-	cost=(docs[i]->costfactor+docs[j]->costfactor)/2.0;
340+	cost=learn_parm->costfunc(docs, rankvalue, i, j, learn_parm);
341+
342 	if(rankvalue[i] > rankvalue[j]) {
343 	  if(kernel_parm->kernel_type == LINEAR)
344 	    docdiff[k]=create_example(k,0,0,cost,
345@@ -2193,7 +2194,7 @@
346       qp->opt_g0[i]-=(kernel_temp*a[kj]*(double)label[kj]);
347       qp->opt_g0[j]-=(kernel_temp*a[ki]*(double)label[ki]);
348       /* compute quadratic part of objective function */
349-      qp->opt_g[varnum*i+j]=(double)label[ki]*(double)label[kj]*kernel_temp;
350+      qp->opt_g[varnum*i+j]=
351       qp->opt_g[varnum*j+i]=(double)label[ki]*(double)label[kj]*kernel_temp;
352     }
353
354diff -urb svm_light/svm_learn_main.c svm_light-new/svm_learn_main.c
355--- svm_light/svm_learn_main.c	2008-10-08 15:51:38.000000000 -0500
356+++ svm_light-new/svm_learn_main.c	2008-11-18 14:06:33.000000000 -0600
357@@ -115,38 +115,12 @@
358
359   /* set default */
360   strcpy (modelfile, "svm_model");
361-  strcpy (learn_parm->predfile, "trans_predictions");
362-  strcpy (learn_parm->alphafile, "");
363   strcpy (restartfile, "");
364   (*verbosity)=1;
365-  learn_parm->biased_hyperplane=1;
366-  learn_parm->sharedslack=0;
367-  learn_parm->remove_inconsistent=0;
368-  learn_parm->skip_final_opt_check=0;
369-  learn_parm->svm_maxqpsize=10;
370-  learn_parm->svm_newvarsinqp=0;
371-  learn_parm->svm_iter_to_shrink=-9999;
372-  learn_parm->maxiter=100000;
373-  learn_parm->kernel_cache_size=40;
374-  learn_parm->svm_c=0.0;
375-  learn_parm->eps=0.1;
376-  learn_parm->transduction_posratio=-1.0;
377-  learn_parm->svm_costratio=1.0;
378-  learn_parm->svm_costratio_unlab=1.0;
379-  learn_parm->svm_unlabbound=1E-5;
380-  learn_parm->epsilon_crit=0.001;
381-  learn_parm->epsilon_a=1E-15;
382-  learn_parm->compute_loo=0;
383-  learn_parm->rho=1.0;
384-  learn_parm->xa_depth=0;
385-  kernel_parm->kernel_type=0;
386-  kernel_parm->poly_degree=3;
387-  kernel_parm->rbf_gamma=1.0;
388-  kernel_parm->coef_lin=1;
389-  kernel_parm->coef_const=1;
390-  strcpy(kernel_parm->custom,"empty");
391   strcpy(type,"c");
392
393+  set_learning_defaults(learn_parm, kernel_parm);
394+
395   for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
396     switch ((argv[i])[1])
397       {
398@@ -221,74 +195,8 @@
399     print_help();
400     exit(0);
401   }
402-  if((learn_parm->skip_final_opt_check)
403-     && (kernel_parm->kernel_type == LINEAR)) {
404-    printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
405-    learn_parm->skip_final_opt_check=0;
406-  }
407-  if((learn_parm->skip_final_opt_check)
408-     && (learn_parm->remove_inconsistent)) {
409-    printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
410-    wait_any_key();
411-    print_help();
412-    exit(0);
413-  }
414-  if((learn_parm->svm_maxqpsize<2)) {
415-    printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize);
416-    wait_any_key();
417-    print_help();
418-    exit(0);
419-  }
420-  if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
421-    printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize);
422-    printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp);
423-    wait_any_key();
424-    print_help();
425-    exit(0);
426-  }
427-  if(learn_parm->svm_iter_to_shrink<1) {
428-    printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
429-    wait_any_key();
430-    print_help();
431-    exit(0);
432-  }
433-  if(learn_parm->svm_c<0) {
434-    printf("\nThe C parameter must be greater than zero!\n\n");
435-    wait_any_key();
436-    print_help();
437-    exit(0);
438-  }
439-  if(learn_parm->transduction_posratio>1) {
440-    printf("\nThe fraction of unlabeled examples to classify as positives must\n");
441-    printf("be less than 1.0 !!!\n\n");
442-    wait_any_key();
443-    print_help();
444-    exit(0);
445-  }
446-  if(learn_parm->svm_costratio<=0) {
447-    printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
448-    wait_any_key();
449-    print_help();
450-    exit(0);
451-  }
452-  if(learn_parm->epsilon_crit<=0) {
453-    printf("\nThe epsilon parameter must be greater than zero!\n\n");
454-    wait_any_key();
455-    print_help();
456-    exit(0);
457-  }
458-  if(learn_parm->rho<0) {
459-    printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
460-    printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
461-    printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
462-    wait_any_key();
463-    print_help();
464-    exit(0);
465-  }
466-  if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
467-    printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
468-    printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
469-    printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
470+
471+  if (!check_learning_parms(learn_parm, kernel_parm)) {
472     wait_any_key();
473     print_help();
474     exit(0);
475@@ -303,7 +211,7 @@
476
477 void print_help()
478 {
479-  printf("\nSVM-light %s: Support Vector Machine, learning module     %s\n",VERSION,VERSION_DATE);
480+  printf("\nSVM-light %s: Support Vector Machine, learning module     %s\n",SVMLIGHT_VERSION,SVMLIGHT_VERSION_DATE);
481   copyright_notice();
482   printf("   usage: svm_learn [options] example_file model_file\n\n");
483   printf("Arguments:\n");
484@@ -379,7 +287,7 @@
485   wait_any_key();
486   printf("\nMore details in:\n");
487   printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n");
488-  printf("    Kernel Methods - Support Vector Learning, B. Sch�lkopf and C. Burges and\n");
489+  printf("    Kernel Methods - Support Vector Learning, B. Schoelkopf and C. Burges and\n");
490   printf("    A. Smola (ed.), MIT Press, 1999.\n");
491   printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n");
492   printf("    Efficiently. International Conference on Machine Learning (ICML), 2000.\n");
493