Pascal implementation of an SVM based on the SMO method

Source: Internet
Author: User
Tags: svm, rbf, kernel

Smo_svm.pas:

{
Author: Liu
Website: www.aivisoft.net
E-mail: geo.cra@gmail.com
This code is translated from pseudo-code by J. Platt.
It implements an SMO (Sequential Minimal Optimization) SVM.
Enjoy a fast and efficient SVM!
The code currently provides five different kernels.
}
unit smo_svm;

interface

uses
  Windows, SysUtils, Variants, Classes, Graphics, Math, UniTypes;

type
  { One training sample: a feature vector plus its class label (+1 / -1). }
  TVector = record
    Items: TSingleExtendedArray;
    Target: Extended;
  end;

  TVectorArray = array of TVector;

  { Supported kernel functions. }
  TKernels = (klLinear, klGsRBF, klExpRBF, klPolynomial, klTanh);
  {
    klLinear:     linear kernel
    klGsRBF:      Gaussian RBF kernel
    klExpRBF:     exponential RBF kernel
    klPolynomial: polynomial kernel
    klTanh:       tanh (sigmoid) kernel
  }

  { SMO-trained support vector machine (after J. Platt's pseudo-code). }
  TSVM = class
  private
    N_Degree, N: Longint;           // feature dimension; number of samples
    Tolerance, EPS, B: Extended;    // KKT tolerance; numeric epsilon; threshold
    Alph, Error_Cache, W,
      Precomputed_Self_Dot_Product: TSingleExtendedArray;
    function Dot_Compute(I1, I2: Longint): Extended;
    function Learned_Func(K: Longint): Extended;
    function Kernel_Func(I1, I2: Longint): Extended;
    function PKernel_Func(I1: Longint; Items2: TSingleExtendedArray): Extended;
    function ExamineExamples(I2: Longint): Longint;
    function TakeStep(I1, I2: Longint): Boolean;
  public
    End_Support_I: Longint;         // count of candidate support vectors
    Vectors: TVectorArray;          // stored training samples
    // C: box constraint; remaining fields are kernel hyper-parameters.
    C, Two_Sigma_Squared, ThA, ThB, Pyp: Extended;
    Kernel: TKernels;
    procedure Learn;
    procedure Init(SN_Degree: Longint; SC, STwo_Sigma_Squared, SThA, SThB,
      SPyp: Extended; SKernel: TKernels);
    procedure LearnExamples(Data: TSingleExtendedArray; Target: Extended);
    function Optimize: Longint;
    function Predict(ItemK: TSingleExtendedArray): Extended;
    function SaveToFile(FileName: string): Boolean;
    function LoadFromFile(FileName: string): Boolean;
    destructor Destroy; override;
  end;

Implementation

{ Plain dot product of the two stored sample vectors I1 and I2. }
function TSVM.Dot_Compute(I1, I2: Longint): Extended;
var
  I: Longint;
  Dot: Extended;
begin
  Dot := 0;
  for I := 0 to N_Degree - 1 do
    Dot := Dot + Vectors[I1].Items[I] * Vectors[I2].Items[I];
  Result := Dot;
end;

{ Store hyper-parameters, reset the training set, and pre-allocate storage.
  Must be called before LearnExamples/Learn. }
procedure TSVM.Init(SN_Degree: Longint; SC, STwo_Sigma_Squared, SThA, SThB,
  SPyp: Extended; SKernel: TKernels);
begin
  N_Degree := SN_Degree;
  SetLength(Vectors, $100);     // capacity grows in $100 chunks (see LearnExamples)
  C := SC;
  EPS := 0.001;
  Tolerance := 0.001;
  Two_Sigma_Squared := STwo_Sigma_Squared;
  ThA := SThA;
  ThB := SThB;
  Pyp := SPyp;
  N := 0;
  Kernel := SKernel;
  SetLength(W, N_Degree);       // explicit weight vector for the linear kernel
end;

{ Append one training sample. The dynamic-array reference in Data is stored,
  not copied, so the caller must hand over a fresh array each call
  (NOTE(review): the demo form does exactly that — confirm for other callers). }
procedure TSVM.LearnExamples(Data: TSingleExtendedArray; Target: Extended);
begin
  // Grow capacity in chunks of $100 (original "N ++ $100" was OCR noise
  // for "N + $100").
  if N > High(Vectors) then
    SetLength(Vectors, N + $100);
  Vectors[N].Items := Data;
  Vectors[N].Target := Target;
  Inc(N);
end;

{ SVM output u(K) for stored training sample K (used during training). }
function TSVM.Learned_Func(K: Longint): Extended;
var
  S: Extended;
  I: Longint;
begin
  S := 0;
  case Kernel of                 // original listing was missing the required "of"
    klLinear:
      // Linear kernel: use the explicitly maintained weight vector W.
      for I := 0 to N_Degree - 1 do
        S := S + W[I] * Vectors[K].Items[I];
  else
    // Non-linear kernels: expand over all candidate support vectors.
    for I := 0 to End_Support_I - 1 do
      S := S + Alph[I] * Vectors[I].Target * Kernel_Func(I, K);
  end;
  Result := S - B;
end;

{ Kernel value K(x_I1, x_I2) between two stored training samples. }
function TSVM.Kernel_Func(I1, I2: Longint): Extended;
var
  S: Extended;
  I: Longint;
begin
  S := 0;
  case Kernel of                 // original listing was missing the required "of"
    klLinear:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Vectors[I1].Items[I] * Vectors[I2].Items[I];
        Result := S;
      end;
    klPolynomial:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Vectors[I1].Items[I] * Vectors[I2].Items[I];
        // (s + 1)^Pyp written via exp/ln; requires S + 1 > 0.
        Result := Exp(Ln(S + 1) * Pyp);
      end;
    klExpRBF:
      begin
        // ||x1 - x2||^2 from the precomputed self dot products.
        S := Precomputed_Self_Dot_Product[I1] +
          Precomputed_Self_Dot_Product[I2] - 2 * Dot_Compute(I1, I2);
        Result := Exp(-Sqrt(S) / Two_Sigma_Squared);
      end;
    klGsRBF:
      begin
        S := Precomputed_Self_Dot_Product[I1] +
          Precomputed_Self_Dot_Product[I2] - 2 * Dot_Compute(I1, I2);
        Result := Exp(-S / Two_Sigma_Squared);
      end;
    klTanh:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Vectors[I1].Items[I] * Vectors[I2].Items[I];
        Result := Tanh(ThA * S + ThB);
      end;
  else
    Result := 0;  // defensive: unreachable for the current TKernels values
  end;
end;

{ SMO outer-loop step: if sample I2 violates the KKT conditions, try to find
  a partner I1 and optimize the pair. Returns 1 if a step was taken, else 0. }
function TSVM.ExamineExamples(I2: Longint): Longint;
var
  Y2, Alph2, E2, R2: Extended;
  K, K0, I1: Longint;
  TMax, E1, Temp: Extended;
begin
  Y2 := Vectors[I2].Target;
  Alph2 := Alph[I2];

  // The cached error is only valid for non-bound multipliers.
  if (Alph2 > 0) and (Alph2 < C) then
    E2 := Error_Cache[I2]
  else
    E2 := Learned_Func(I2) - Y2;

  R2 := Y2 * E2;
  // Proceed only if I2 violates the KKT conditions beyond Tolerance.
  if ((R2 < -Tolerance) and (Alph2 < C)) or
     ((R2 > Tolerance) and (Alph2 > 0)) then
  begin
    // 1) Second-choice heuristic: non-bound example maximizing |E1 - E2|.
    I1 := -1;
    TMax := 0;
    for K := 0 to End_Support_I - 1 do
      if (Alph[K] > 0) and (Alph[K] < C) then
      begin
        E1 := Error_Cache[K];
        Temp := Abs(E1 - E2);
        if Temp > TMax then
        begin
          TMax := Temp;
          I1 := K;
        end;
      end;
    if I1 >= 0 then
      if TakeStep(I1, I2) then
      begin
        Result := 1;
        Exit;
      end;

    // 2) Fall back: all non-bound examples, starting at a random position.
    // NOTE(review): Randomize reseeds the RNG on every call; a single seed
    // at startup would be preferable.
    Randomize;
    K0 := Random(End_Support_I);
    for K := K0 to End_Support_I + K0 - 1 do
    begin
      I1 := K mod End_Support_I;
      if (Alph[I1] > 0) and (Alph[I1] < C) then
        if TakeStep(I1, I2) then
        begin
          Result := 1;
          Exit;
        end;
    end;

    // 3) Last resort: the entire training set, starting at a random position.
    Randomize;
    K0 := Random(End_Support_I);
    for K := K0 to End_Support_I + K0 - 1 do
    begin
      I1 := K mod End_Support_I;
      if TakeStep(I1, I2) then
      begin
        Result := 1;
        Exit;
      end;
    end;
  end;

  Result := 0;
end;

{ SMO inner step: jointly optimize the pair (Alph[I1], Alph[I2]) analytically,
  then update the threshold B, the linear weight vector W, and the error cache.
  Returns True when a non-trivial change was made. }
function TSVM.TakeStep(I1, I2: Longint): Boolean;
var
  I: Longint;
  Y1, Y2, S: Extended;
  Alph1, Alph2, Gamma, C1, C2, B1, B2, BNew, Delta_B: Extended;
  A1, A2: Extended;
  E1, E2, L, H, K11, K22, K12, Eta, LObj, HObj, T, T1, T2: Extended;
begin
  if I1 = I2 then
  begin
    Result := False;
    Exit;
  end;

  Alph1 := Alph[I1];
  Y1 := Vectors[I1].Target;
  if (Alph1 > 0) and (Alph1 < C) then
    E1 := Error_Cache[I1]
  else
    E1 := Learned_Func(I1) - Y1;

  Alph2 := Alph[I2];
  Y2 := Vectors[I2].Target;
  if (Alph2 > 0) and (Alph2 < C) then
    E2 := Error_Cache[I2]
  else
    E2 := Learned_Func(I2) - Y2;

  S := Y1 * Y2;

  // Feasible segment [L, H] for the new Alph[I2].
  if Y1 = Y2 then
  begin
    Gamma := Alph1 + Alph2;
    if Gamma > C then
    begin
      L := Gamma - C;
      H := C;
    end
    else
    begin
      L := 0;
      H := Gamma;
    end;
  end
  else
  begin
    Gamma := Alph1 - Alph2;
    if Gamma > 0 then
    begin
      L := 0;
      H := C - Gamma;
    end
    else
    begin
      L := -Gamma;
      H := C;
    end;
  end;

  if L = H then
  begin
    Result := False;
    Exit;
  end;

  K11 := Kernel_Func(I1, I1);
  K12 := Kernel_Func(I1, I2);
  K22 := Kernel_Func(I2, I2);
  Eta := 2 * K12 - K11 - K22;   // second derivative of the objective (<= 0)

  if Eta < 0 then
  begin
    // Normal case: unconstrained optimum, clipped to [L, H].
    A2 := Alph2 + Y2 * (E2 - E1) / Eta;
    if A2 < L then
      A2 := L
    else if A2 > H then
      A2 := H;
  end
  else
  begin
    // Degenerate curvature: evaluate the objective at both segment ends.
    C1 := Eta / 2;
    C2 := Y2 * (E1 - E2) - Eta * Alph2;
    LObj := C1 * L * L + C2 * L;
    // Original listing read "C1 * H + C2 * H": OCR lost one factor of H.
    HObj := C1 * H * H + C2 * H;
    // Reconstructed from Platt's pseudo-code (the source line was fused).
    if LObj > HObj + EPS then
      A2 := L
    else if LObj < HObj - EPS then
      A2 := H
    else
      A2 := Alph2;
  end;

  // Snap to the box boundary to avoid lingering tiny multipliers.
  if A2 < 1e-8 then
    A2 := 0
  else if A2 > C - 1e-8 then
    A2 := C;

  // Reject numerically insignificant progress.
  if Abs(A2 - Alph2) < EPS * (A2 + Alph2 + EPS) then
  begin
    Result := False;
    Exit;
  end;

  // Recover Alph[I1] from the equality constraint, pushing any excess back.
  A1 := Alph1 - S * (A2 - Alph2);
  if A1 < 0 then
  begin
    A2 := A2 + S * A1;
    A1 := 0;
  end
  else if A1 > C then
  begin
    T := A1 - C;
    A2 := A2 + S * T;
    A1 := C;
  end;

  // Threshold update: prefer a KKT-consistent non-bound multiplier.
  if (A1 > 0) and (A1 < C) then
    BNew := B + E1 + Y1 * (A1 - Alph1) * K11 + Y2 * (A2 - Alph2) * K12
  else if (A2 > 0) and (A2 < C) then
    BNew := B + E2 + Y1 * (A1 - Alph1) * K12 + Y2 * (A2 - Alph2) * K22
  else
  begin
    B1 := B + E1 + Y1 * (A1 - Alph1) * K11 + Y2 * (A2 - Alph2) * K12;
    B2 := B + E2 + Y1 * (A1 - Alph1) * K12 + Y2 * (A2 - Alph2) * K22;
    BNew := (B1 + B2) / 2;
  end;

  Delta_B := BNew - B;
  B := BNew;

  T1 := Y1 * (A1 - Alph1);
  T2 := Y2 * (A2 - Alph2);

  // Linear kernel keeps W up to date incrementally.
  if Kernel = klLinear then
    for I := 0 to N_Degree - 1 do
      W[I] := W[I] + T1 * Vectors[I1].Items[I] + T2 * Vectors[I2].Items[I];

  // Refresh cached errors for non-bound multipliers.
  for I := 0 to End_Support_I - 1 do
    if (0 < Alph[I]) and (Alph[I] < C) then
      Error_Cache[I] := Error_Cache[I] + T1 * Kernel_Func(I1, I) +
        T2 * Kernel_Func(I2, I) - Delta_B;
  Error_Cache[I1] := 0;
  Error_Cache[I2] := 0;

  Alph[I1] := A1;
  Alph[I2] := A2;

  Result := True;
end;

{ Train the SVM on all samples added via LearnExamples, following Platt's
  outer loop: alternate full sweeps with sweeps over non-bound examples. }
procedure TSVM.Learn;
var
  NumChanged, I: Longint;
  ExamineAll: Boolean;
begin
  End_Support_I := N;
  SetLength(Alph, N);
  SetLength(Error_Cache, N);
  SetLength(Precomputed_Self_Dot_Product, N);
  for I := 0 to N_Degree - 1 do
    W[I] := 0;
  for I := 0 to N - 1 do
  begin
    Alph[I] := 0;
    Precomputed_Self_Dot_Product[I] := Dot_Compute(I, I);
  end;
  B := 0;
  NumChanged := 0;
  ExamineAll := True;
  while (NumChanged > 0) or ExamineAll do
  begin
    NumChanged := 0;
    if ExamineAll then
    begin
      for I := 0 to N - 1 do
        Inc(NumChanged, ExamineExamples(I));
    end
    else
    begin
      for I := 0 to N - 1 do
        // Non-bound multipliers only: Alph[I] <> 0 and Alph[I] <> C.
        // (Reconstructed grouping — the scraped line had lost its parentheses.)
        if ((Alph[I] > 1e-8) or (Alph[I] < -1e-8)) and
           ((Alph[I] > C + 1e-8) or (Alph[I] < C - 1e-8)) then
          Inc(NumChanged, ExamineExamples(I));
    end;
    if ExamineAll then
      ExamineAll := False
    else if NumChanged = 0 then
      ExamineAll := True;
  end;
end;

{ Kernel value between stored sample I1 and an external point Items2
  (used by Predict, where no precomputed self dot product exists). }
function TSVM.PKernel_Func(I1: Longint; Items2: TSingleExtendedArray): Extended;
var
  S: Extended;
  I: Longint;
begin
  S := 0;
  case Kernel of                 // original listing was missing the required "of"
    klLinear:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Vectors[I1].Items[I] * Items2[I];
        Result := S;
      end;
    klPolynomial:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Vectors[I1].Items[I] * Items2[I];
        // (s + 1)^Pyp written via exp/ln; requires S + 1 > 0.
        Result := Exp(Ln(S + 1) * Pyp);
      end;
    klExpRBF:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Sqr(Vectors[I1].Items[I] - Items2[I]);
        Result := Exp(-Sqrt(S) / Two_Sigma_Squared);
      end;
    klGsRBF:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Sqr(Vectors[I1].Items[I] - Items2[I]);
        Result := Exp(-S / Two_Sigma_Squared);
      end;
    klTanh:
      begin
        for I := 0 to N_Degree - 1 do
          S := S + Vectors[I1].Items[I] * Items2[I];
        Result := Tanh(ThA * S + ThB);
      end;
  else
    Result := 0;  // defensive: unreachable for the current TKernels values
  end;
end;

{ Compact the model after training: drop every vector whose multiplier is
  numerically zero, so only true support vectors remain. Returns their count. }
function TSVM.Optimize: Longint;
var
  Changed: Boolean;
  I, J: Longint;
begin
  Changed := True;
  while Changed do
  begin
    Changed := False;
    for I := 0 to End_Support_I - 1 do
      if (Alph[I] < 1e-8) and (Alph[I] > -1e-8) then
      begin
        // Shift the tail down by one to overwrite the dead entry.
        for J := I + 1 to End_Support_I - 1 do
        begin
          Vectors[J - 1] := Vectors[J];
          Alph[J - 1] := Alph[J];
        end;
        Changed := True;
        Dec(End_Support_I);
        Break;  // restart the scan after mutating the arrays
      end;
  end;
  N := End_Support_I;
  SetLength(Vectors, N);
  SetLength(Alph, N);
  Result := End_Support_I;
end;

{ Decision value for an external point ItemK (positive => class +1). }
function TSVM.Predict(ItemK: TSingleExtendedArray): Extended;
var
  S: Extended;
  I: Longint;
begin
  S := 0;
  case Kernel of                 // original listing was missing the required "of"
    klLinear:
      for I := 0 to N_Degree - 1 do
        S := S + W[I] * ItemK[I];
  else
    for I := 0 to End_Support_I - 1 do
      S := S + Alph[I] * Vectors[I].Target * PKernel_Func(I, ItemK);
  end;
  Result := S - B;
end;

{ Persist the model. Binary layout: End_Support_I, N_Degree, then per vector
  (Target, Alph, Items[0..N_Degree-1]), then W[0..N_Degree-1].
  Returns False if any stream operation raises. }
function TSVM.SaveToFile(FileName: string): Boolean;
var
  I, J: Longint;
  SaveStream: TMemoryStream;
begin
  SaveStream := TMemoryStream.Create;
  try
    try
      SaveStream.Seek(0, soFromBeginning);
      SaveStream.Write(End_Support_I, SizeOf(End_Support_I));
      SaveStream.Write(N_Degree, SizeOf(N_Degree));
      for I := 0 to End_Support_I - 1 do
      begin
        SaveStream.Write(Vectors[I].Target, SizeOf(Vectors[I].Target));
        SaveStream.Write(Alph[I], SizeOf(Alph[I]));
        for J := 0 to N_Degree - 1 do
          SaveStream.Write(Vectors[I].Items[J], SizeOf(Vectors[I].Items[J]));
      end;
      for I := 0 to N_Degree - 1 do
        SaveStream.Write(W[I], SizeOf(W[I]));
      SaveStream.SaveToFile(FileName);
      Result := True;
    except
      Result := False;
    end;
  finally
    SaveStream.Free;  // leak-safe even if SaveToFile itself raises
  end;
end;

{ Load a model saved by SaveToFile (same binary layout).
  Returns False if any stream operation raises. }
function TSVM.LoadFromFile(FileName: string): Boolean;
var
  I, J: Longint;
  ReadStream: TMemoryStream;
begin
  ReadStream := TMemoryStream.Create;
  try
    try
      ReadStream.LoadFromFile(FileName);
      ReadStream.Seek(0, soFromBeginning);
      ReadStream.Read(End_Support_I, SizeOf(End_Support_I));
      N := End_Support_I;
      SetLength(Vectors, N);
      SetLength(Alph, N);
      ReadStream.Read(N_Degree, SizeOf(N_Degree));
      // Bug fix: size W to the stored dimension before reading into it —
      // the original assumed Init had already been called with the same degree.
      SetLength(W, N_Degree);
      for I := 0 to End_Support_I - 1 do
      begin
        ReadStream.Read(Vectors[I].Target, SizeOf(Vectors[I].Target));
        ReadStream.Read(Alph[I], SizeOf(Alph[I]));
        SetLength(Vectors[I].Items, N_Degree);
        for J := 0 to N_Degree - 1 do
          ReadStream.Read(Vectors[I].Items[J], SizeOf(Vectors[I].Items[J]));
      end;
      for I := 0 to N_Degree - 1 do
        ReadStream.Read(W[I], SizeOf(W[I]));
      Result := True;
    except
      Result := False;
    end;
  finally
    ReadStream.Free;
  end;
end;

{ Release all per-instance storage. }
destructor TSVM.Destroy;
begin
  // Assigning nil to a dynamic array is equivalent to SetLength(..., 0).
  Vectors := nil;
  Alph := nil;
  W := nil;
  inherited;
end;

End.

---------------------------------------------------

Call form:

unit unitmain;

interface

uses
  Windows, Messages, SysUtils, Variants, Classes, Graphics, Controls, Forms,
  Dialogs, StdCtrls, ExtCtrls, smo_svm;

type
  { Demo form: click points onto the image, train, then paint the decision map. }
  TfrmMain = class(TForm)
    ImgPoint: TImage;
    BtnTrain: TButton;
    BtnPredict: TButton;
    RdbRed: TRadioButton;
    RdbBlue: TRadioButton;
    procedure BtnPredictClick(Sender: TObject);
    procedure ImgPointMouseUp(Sender: TObject; Button: TMouseButton;
      Shift: TShiftState; X, Y: Integer);
    procedure FormCreate(Sender: TObject);
    procedure FormDestroy(Sender: TObject);
    procedure BtnTrainClick(Sender: TObject);
  private
    MySVM: TSVM;   // the classifier being demonstrated
  public
    { Public declarations }
  end;

var
  frmMain: TfrmMain;

implementation

{$R *.dfm}

{ Paint the decision region for every pixel of the 320x320 canvas, then
  overlay the stored training points as small discs. }
procedure TfrmMain.BtnPredictClick(Sender: TObject);
var
  I, J, X, Y: Longint;
  Point: TSingleExtendedArray;
begin
  SetLength(Point, 2);
  for I := 0 to 320 do
    for J := 0 to 320 do
    begin
      Point[0] := I / 320;
      Point[1] := J / 320;
      if MySVM.Predict(Point) > 0 then
        ImgPoint.Canvas.Pixels[I, J] := clRed
      else
        ImgPoint.Canvas.Pixels[I, J] := clBlue;
    end;
  for I := 0 to MySVM.End_Support_I - 1 do
  begin
    X := Trunc(MySVM.Vectors[I].Items[0] * 320);
    Y := Trunc(MySVM.Vectors[I].Items[1] * 320);
    ImgPoint.Canvas.Pen.Style := psClear;
    // TColor is BGR: $8080FF is light red, $FF8080 is light blue.
    if MySVM.Vectors[I].Target > 0 then
      ImgPoint.Canvas.Brush.Color := $8080FF
    else
      ImgPoint.Canvas.Brush.Color := $FF8080;
    ImgPoint.Canvas.Ellipse(X - 4, Y - 4, X + 4, Y + 4);
  end;
end;

{ Record the clicked pixel as a training sample scaled into [0,1]^2
  (+1 when "red" is selected, -1 otherwise) and echo it on the canvas. }
procedure TfrmMain.ImgPointMouseUp(Sender: TObject; Button: TMouseButton;
  Shift: TShiftState; X, Y: Integer);
var
  Point: TSingleExtendedArray;
  Outs: Extended;
begin
  SetLength(Point, 2);
  Point[0] := X / 320;
  Point[1] := Y / 320;
  if RdbRed.Checked then
    Outs := 1
  else
    Outs := -1;
  MySVM.LearnExamples(Point, Outs);
  // TColor is BGR: $8080FF is light red, $FF8080 is light blue.
  ImgPoint.Canvas.Pen.Style := psClear;
  if RdbRed.Checked then
    ImgPoint.Canvas.Brush.Color := $8080FF
  else
    ImgPoint.Canvas.Brush.Color := $FF8080;
  ImgPoint.Canvas.Ellipse(X - 3, Y - 3, X + 3, Y + 3);
end;

{ Create and configure the SVM, and clear the drawing surface. }
procedure TfrmMain.FormCreate(Sender: TObject);
begin
  MySVM := TSVM.Create;
  // (degree=2, C=1, two_sigma_squared=1, tha=2, thb=1, pyp=8, Gaussian RBF)
  // NOTE(review): these literals come from a garbled listing — confirm them
  // against the original source before relying on the exact values.
  MySVM.Init(2, 1, 1, 2, 1, 8, klGsRBF);
  DoubleBuffered := True;
  ImgPoint.Canvas.FillRect(Rect(0, 0, 320, 320));
end;

{ Release the classifier when the form goes away. }
procedure TfrmMain.FormDestroy(Sender: TObject);
begin
  MySVM.Free;
end;

{ Train on the collected points, compact to support vectors, and report. }
procedure TfrmMain.BtnTrainClick(Sender: TObject);
begin
  MySVM.Learn;
  Application.MessageBox(
    PChar('there are/is ' + IntToStr(MySVM.Optimize) + ' support vector (s).'),
    'result ');
end;

End.

-------------------------------

Related URL: http://research.microsoft.com/users/jplatt/smo.html

Contact Us

The content of this page is sourced from the Internet and does not represent Alibaba Cloud's opinion; the products and services mentioned on this page have no affiliation with Alibaba Cloud. If any content on this page is confusing, please write us an email and we will handle the problem within 5 days of receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.