
com.github.chen0040.libsvm.svm_train

package com.github.chen0040.libsvm;

/**
 * Created by xschen on 16/8/15.
 */

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
import java.util.Vector;


class svm_train {
    private svm_parameter param;		// set by parse_command_line
    private svm_problem prob;		// set by read_problem
    private svm_model model;
    private String input_file_name;		// set by parse_command_line
    private String model_file_name;		// set by parse_command_line
    private String error_msg;
    private int cross_validation;
    private int nr_fold;

    private static svm_print_interface svm_print_null = new svm_print_interface()
    {
        public void print(String s) {}
    };

    private static void exit_with_help()
    {
        System.out.print(
                "Usage: svm_train [options] training_set_file [model_file]\n"
                        +"options:\n"
                        +"-s svm_type : set type of SVM (default 0)\n"
                        +"	0 -- C-SVC		(multi-class classification)\n"
                        +"	1 -- nu-SVC		(multi-class classification)\n"
                        +"	2 -- one-class SVM\n"
                        +"	3 -- epsilon-SVR	(regression)\n"
                        +"	4 -- nu-SVR		(regression)\n"
                        +"-t kernel_type : set type of kernel function (default 2)\n"
                        +"	0 -- linear: u'*v\n"
                        +"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
                        +"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
                        +"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
                        +"	4 -- precomputed kernel (kernel values in training_set_file)\n"
                        +"-d degree : set degree in kernel function (default 3)\n"
                        +"-g gamma : set gamma in kernel function (default 1/num_features)\n"
                        +"-r coef0 : set coef0 in kernel function (default 0)\n"
                        +"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
                        +"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
                        +"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
                        +"-m cachesize : set cache memory size in MB (default 100)\n"
                        +"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
                        +"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
                        +"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
                        +"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
                        +"-v n : n-fold cross validation mode\n"
                        +"-q : quiet mode (no outputs)\n"
        );
        System.exit(1);
    }
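
    // For illustration only (the file names here are hypothetical): a command line such as
    //   java com.github.chen0040.libsvm.svm_train -s 0 -t 2 -c 1 -g 0.5 heart_scale heart_scale.model
    // trains a C-SVC with an RBF kernel (C=1, gamma=0.5) on heart_scale and writes the model to heart_scale.model,
    // while passing "-v 5" instead of a model file reports 5-fold cross-validation results and saves nothing.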

    private void do_cross_validation()
    {
        int i;
        int total_correct = 0;
        double total_error = 0;
        double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
        double[] target = new double[prob.l];

        SupportVectorMachine.svm_cross_validation(prob,param,nr_fold,target);
        if(param.svm_type == svm_parameter.EPSILON_SVR ||
                param.svm_type == svm_parameter.NU_SVR)
        {
            for(i=0;i<prob.l;i++)
            {
                double y = prob.y[i];
                double v = target[i];
                total_error += (v-y)*(v-y);
                sumv += v;
                sumy += y;
                sumvv += v*v;
                sumyy += y*y;
                sumvy += v*y;
            }
            System.out.print("Cross Validation Mean squared error = "+total_error/prob.l+"\n");
            System.out.print("Cross Validation Squared correlation coefficient = "+
                    ((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
                    ((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))+"\n"
            );
        }
        else
        {
            for(i=0;i<prob.l;i++)
                if(target[i] == prob.y[i])
                    ++total_correct;
            System.out.print("Cross Validation Accuracy = "+100.0*total_correct/prob.l+"%\n");
        }
    }

    private void run(String argv[]) throws IOException
    {
        parse_command_line(argv);
        read_problem();
        error_msg = SupportVectorMachine.svm_check_parameter(prob,param);

        if(error_msg != null)
        {
            System.err.print("ERROR: "+error_msg+"\n");
            System.exit(1);
        }

        if(cross_validation != 0)
        {
            do_cross_validation();
        }
        else
        {
            model = SupportVectorMachine.svm_train(prob,param);
            SupportVectorMachine.svm_save_model(model_file_name,model);
        }
    }

    public static void main(String argv[]) throws IOException
    {
        svm_train t = new svm_train();
        t.run(argv);
    }

    private static double atof(String s)
    {
        double d = Double.valueOf(s).doubleValue();
        if (Double.isNaN(d) || Double.isInfinite(d))
        {
            System.err.print("NaN or Infinity in input\n");
            System.exit(1);
        }
        return(d);
    }

    private static int atoi(String s)
    {
        return Integer.parseInt(s);
    }

    private void parse_command_line(String argv[])
    {
        int i;
        svm_print_interface print_func = null;	// default printing to stdout

        param = new svm_parameter();
        // default values
        param.svm_type = svm_parameter.C_SVC;
        param.kernel_type = svm_parameter.RBF;
        param.degree = 3;
        param.gamma = 0;	// 1/num_features
        param.coef0 = 0;
        param.nu = 0.5;
        param.cache_size = 100;
        param.C = 1;
        param.eps = 1e-3;
        param.p = 0.1;
        param.shrinking = 1;
        param.probability = 0;
        param.nr_weight = 0;
        param.weight_label = new int[0];
        param.weight = new double[0];
        cross_validation = 0;

        // parse options
        for(i=0;i<argv.length;i++)
        {
            if(argv[i].charAt(0) != '-') break;
            if(++i>=argv.length)
                exit_with_help();
            switch(argv[i-1].charAt(1))
            {
                case 's':
                    param.svm_type = atoi(argv[i]);
                    break;
                case 't':
                    param.kernel_type = atoi(argv[i]);
                    break;
                case 'd':
                    param.degree = atoi(argv[i]);
                    break;
                case 'g':
                    param.gamma = atof(argv[i]);
                    break;
                case 'r':
                    param.coef0 = atof(argv[i]);
                    break;
                case 'n':
                    param.nu = atof(argv[i]);
                    break;
                case 'm':
                    param.cache_size = atof(argv[i]);
                    break;
                case 'c':
                    param.C = atof(argv[i]);
                    break;
                case 'e':
                    param.eps = atof(argv[i]);
                    break;
                case 'p':
                    param.p = atof(argv[i]);
                    break;
                case 'h':
                    param.shrinking = atoi(argv[i]);
                    break;
                case 'b':
                    param.probability = atoi(argv[i]);
                    break;
                case 'q':
                    print_func = svm_print_null;
                    i--;
                    break;
                case 'v':
                    cross_validation = 1;
                    nr_fold = atoi(argv[i]);
                    if(nr_fold < 2)
                    {
                        System.err.print("n-fold cross validation: n must >= 2\n");
                        exit_with_help();
                    }
                    break;
                case 'w':
                    // -wi weight : grow the per-class weight arrays by one entry for class i
                    ++param.nr_weight;
                    {
                        int[] old = param.weight_label;
                        param.weight_label = new int[param.nr_weight];
                        System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
                    }
                    {
                        double[] old = param.weight;
                        param.weight = new double[param.nr_weight];
                        System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
                    }
                    param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
                    param.weight[param.nr_weight-1] = atof(argv[i]);
                    break;
                default:
                    System.err.print("Unknown option: " + argv[i-1] + "\n");
                    exit_with_help();
            }
        }

        SupportVectorMachine.svm_set_print_string_function(print_func);

        // determine filenames

        if(i>=argv.length)
            exit_with_help();

        input_file_name = argv[i];

        if(i<argv.length-1)
            model_file_name = argv[i+1];
        else
        {
            int p = argv[i].lastIndexOf('/');
            ++p;	// move past the '/' (p stays 0 if the path has no '/')
            model_file_name = argv[i].substring(p)+".model";
        }
    }

    // read in a problem (in svmlight format)

    private void read_problem() throws IOException
    {
        BufferedReader fp = new BufferedReader(new InputStreamReader(new FileInputStream(input_file_name)));
        Vector<Double> vy = new Vector<Double>();
        Vector<SupportVectorMachineNode[]> vx = new Vector<SupportVectorMachineNode[]>();
        int max_index = 0;

        while(true)
        {
            String line = fp.readLine();
            if(line == null) break;

            StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
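
            // each line is one training example in sparse LIBSVM format, e.g. "+1 1:0.7 3:-0.2 10:1":
            // the first token is the label, followed by index:value pairs (the tokenizer also splits on ':')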

            vy.addElement(atof(st.nextToken()));
            int m = st.countTokens()/2;
            SupportVectorMachineNode[] x = new SupportVectorMachineNode[m];
            for(int j=0;j<m;j++)
            {
                x[j] = new SupportVectorMachineNode();
                x[j].index = atoi(st.nextToken());
                x[j].value = atof(st.nextToken());
            }
            if(m>0) max_index = Math.max(max_index, x[m-1].index);
            vx.addElement(x);
        }

        fp.close();

        prob = new svm_problem();
        prob.l = vy.size();
        prob.x = new SupportVectorMachineNode[prob.l][];
        for(int i=0;i<prob.l;i++)
            prob.x[i] = vx.elementAt(i);
        prob.y = new double[prob.l];
        for(int i=0;i<prob.l;i++)
            prob.y[i] = vy.elementAt(i);

        if(param.gamma == 0 && max_index > 0)
            param.gamma = 1.0/max_index;

        if(param.kernel_type == svm_parameter.PRECOMPUTED)
            for(int i=0;i<prob.l;i++)
            {
                if (prob.x[i][0].index != 0)
                {
                    System.err.print("Wrong input format: first column must be 0:sample_serial_number\n");
                    throw new RuntimeException("Wrong input format: first column must be 0:sample_serial_number");
                }
                if ((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > max_index)
                {
                    System.err.print("Wrong input format: sample_serial_number out of range\n");
                    throw new RuntimeException("Wrong input format: sample_serial_number out of range");
                }
            }

        fp.close();
    }
}
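
A minimal sketch of driving the trainer from code rather than the command line, assuming a LIBSVM-format file named heart_scale exists on disk (the file name and the SvmTrainExample class are hypothetical; svm_train is package-private, so such a caller would have to live in the same package):

    package com.github.chen0040.libsvm;

    import java.io.IOException;

    class SvmTrainExample {
        public static void main(String[] args) throws IOException {
            // 5-fold cross validation of a C-SVC with an RBF kernel (prints accuracy, saves no model)
            svm_train.main(new String[]{"-s", "0", "-t", "2", "-c", "1", "-v", "5", "heart_scale"});

            // train on the full data set and write the model file
            svm_train.main(new String[]{"-s", "0", "-t", "2", "-c", "1", "heart_scale", "heart_scale.model"});
        }
    }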



