Return to Snippet

Revision: 47541
at June 10, 2011 04:32 by mjaniec


Updated Code
# Log-linear model toy example: 8 learning cases, 4 attributes, binary label.
# (The original `rm(list=ls(all=TRUE))` was dropped: clearing the global
# environment from inside a script is a known anti-pattern.)

labelsNum     <- 2
learningNum   <- 8
attributesNum <- 4  # sequence length = number of attributes (assumed fixed)

# one feature function per attribute plus one per pair of adjacent attributes
featuresNum <- attributesNum + (attributesNum - 1)

labels <- c(1, 0)

# learning cases; column 1 holds the label (1/0), columns 2..5 the attributes
xf <- matrix(c(1, 6, 1, 7, 1,
               1, 2, 1, 7, 1,
               0, 6, 0, 2, 2,
               1, 1, 0, 1, 1,
               0, 5, 2, 1, 1,
               0, 4, 0, 2, 0,
               1, 6, 1, 7, 2,
               0, 1, 0, 1, 0),
             nrow = learningNum, ncol = attributesNum + 1, byrow = TRUE)

# weights for the feature functions, initialised to 1
w <- matrix(1, 1, featuresNum)

# Build a fresh 1 x n weight matrix filled with x.
#   x - value to fill with
#   n - number of features; defaults to the global featuresNum, so the
#       original resetW(x) call sites keep working unchanged
resetW <- function(x, n = featuresNum) {

	matrix(x, 1, n)

}

ffm <- array(0,dim=c(learningNum,featuresNum,labelsNum)) # 3-d array caching feature-function values per (learning case, feature, label)

#---

ff <- function(l,a,x,learning=FALSE) {
	# Feature function for a single attribute.
	#   l        - label (1 or 0)
	#   a        - attribute index (1..attributesNum)
	#   x        - attribute value
	#   learning - TRUE: estimate the empirical frequency from xf;
	#              FALSE: look the value up in the prebuilt ffm array

	a <- a+1  # shift past the label column of xf

	if (learning) {

		# learning cases carrying label l AND value x for attribute a
		n <- length(intersect(which(xf[,1]==l),which(xf[,a]==x)))

		# learning cases carrying value x for attribute a (any label)
		d <- length(which(xf[,a]==x))

		# empirical conditional frequency; 0 when the value was never seen
		if (d!=0) n/d else 0
	}

	else {

		# learning cases matching value x for attribute a
		i <- which(xf[,a]==x)

		# plain if/else replaces the original scalar ifelse(): ifelse() on a
		# scalar is slow and strips attributes; first match wins, unseen
		# values score 0
		if (length(i)>0) ffm[i[1],a-1,which(labels==l)] else 0

	}
}


# Vector of single-attribute feature-function values for case x under label l.
#   l        - label
#   x        - attribute vector of one case
#   learning - forwarded to ff()
# Returns a 1 x attributesNum matrix.
ffv <- function(l,x,learning) {

	values <- vapply(
		seq_len(attributesNum),
		function(idx) ff(l, idx, x[idx], learning),
		numeric(1)
	)

	# keep the historical 1 x attributesNum matrix shape
	matrix(values, nrow = 1)

}

# Fill the feature-function cache ffm for the single-attribute features,
# for every learning case and label.  Returns the filled copy; the caller
# is expected to assign it back to the global ffm.
buildffm <- function() {

	for (l in labels) {

		for (lc in 1:learningNum) {

			# the original hard-coded 1:4 here; seq_len(attributesNum)
			# keeps the code correct if the attribute count changes
			ffm[lc,seq_len(attributesNum),which(labels==l)] <- ffv(l,xf[lc,2:(attributesNum+1)],learning=TRUE)

		}
	}

	ffm
}

#--- feature function ff generalized to an arbitrary number of attributes

# Do the given attribute values occur at the given positions of sequence x?
#   x           - attribute vector of one learning case
#   a_values    - expected values
#   a_positions - positions in x to compare against (same length as a_values)
# Vectorized replacement for the original accumulator loop; all() also
# handles zero constraints (returns TRUE), which the old 1:length()-style
# loop got wrong on empty input.
isPresent <- function(x,a_values,a_positions) {

	all(x[a_positions] == a_values)

}

# Feature function generalized to an arbitrary set of attributes.
#   l           - label
#   a_values    - attribute values to match
#   a_positions - positions of those attributes in the sequence
#   ff_index    - feature index inside ffm (lookup mode only)
#   learning    - TRUE: estimate the frequency from xf; FALSE: look up in ffm
ff2 <- function(l,a_values,a_positions,ff_index=0,learning=FALSE) {

	if (learning) {

		label_count <- 0  # matching cases that also carry label l
		total_count <- 0  # matching cases regardless of label

		for (lc in 1:learningNum) {

			presence <- isPresent(xf[lc,2:(attributesNum+1)],a_values,a_positions)

			if (presence) {

				total_count <- total_count+1

				if (xf[lc,1]==l) label_count <- label_count+1

			}
		}

		# guard the 0/0 case: the original returned NaN for a value
		# combination never seen in the learning set, while ff() returns 0
		# there; return 0 for consistency
		if (total_count != 0) label_count/total_count else 0

	}

	else {

		vf <- 0

		# the first matching learning case with label l supplies the cached value
		for (i in 1:learningNum) {

			if (isPresent(xf[i,2:(attributesNum+1)],a_values,a_positions) && xf[i,1]==l) {

				vf <- ffm[i,ff_index,which(labels==l)]

				break

			}

		}

		vf

	}

}

# Full feature vector for case x under label l: one entry per single
# attribute, followed by one entry per pair of adjacent attributes.
# Returns a 1 x featuresNum matrix.
ffv2 <- function(l,x,learning) {

	# feature functions tied to individual attributes
	singles <- vapply(
		seq_len(attributesNum),
		function(idx) ff(l, idx, x[idx], learning),
		numeric(1)
	)

	# feature functions tied to pairs of adjacent attributes
	pairs <- vapply(
		seq_len(attributesNum - 1),
		function(idx) ff2(l, x[idx:(idx + 1)], c(idx, idx + 1),
		                  ff_index = attributesNum + idx, learning),
		numeric(1)
	)

	# keep the historical 1 x featuresNum matrix shape
	matrix(c(singles, pairs), nrow = 1)
}


# Fill the feature-function cache ffm for all features (singles and
# adjacent pairs), for every learning case and label.  Returns the filled
# copy; the caller assigns it back to the global ffm.
buildffm2 <- function() {

	for (lbl in labels) {

		layer <- which(labels == lbl)

		for (case_no in seq_len(learningNum)) {

			attrs <- xf[case_no, 2:(attributesNum + 1)]
			ffm[case_no, , layer] <- ffv2(lbl, attrs, learning = TRUE)

		}
	}

	ffm
}

#---

# Numerator of the probability function: exp of the weighted sum of the
# feature-function values for label l.
#   x - feature vector (prediction mode) or learning-case index (learning mode)
nompf <- function(l,x,w,learning) {

	features <-
		if (learning)
			ffm[x, , which(labels == l)]  # prebuilt table, x = case number
		else
			ffv2(l, x, learning)          # x is a raw feature vector

	exp(sum(features * w))

}

# Denominator of the probability function: the numerator summed over
# every label (the partition sum).
sumpf <- function(x,w,learning) {

	sum(vapply(labels, function(lbl) nompf(lbl, x, w, learning), numeric(1)))

}

# Probability of label l for case x under weights w.
#   x - feature vector, or a learning-case index when learning = TRUE
pf <- function(l,x,w,learning=FALSE) {

	numerator   <- nompf(l,x,w,learning)
	denominator <- sumpf(x,w,learning)

	numerator / denominator

}

# Probability of every label for feature vector x; returns a 1 x labelsNum
# matrix whose columns are named after the labels.
pLabel <- function(x,w) {

	probs <- vapply(labels, function(lbl) pf(lbl, x, w, learning = FALSE),
	                numeric(1))

	matrix(probs, nrow = 1, dimnames = list(NULL, labels))

}

#>>>>>> TODO: modelMax below is still unimplemented

# Placeholder marked "2DO" by the author: presumably meant to return the
# label maximizing the model probability for weights w — TODO confirm
# intent.  Currently unimplemented; the empty body evaluates to NULL.
modelMax <- function(w) {
}


# Sum of squared prediction errors over all learning cases; depends only
# on the weight vector w (used as the optim() objective).
modelError <- function(w) {

	predicted <- vapply(
		seq_len(learningNum),
		function(case_no) pf(1, case_no, w, learning = TRUE),
		numeric(1)
	)

	sum((predicted - xf[, 1])^2)

}

#--- initialization

xf
(ffm <- buildffm2())

#--- warm-up checks ;)

ff(1,1,6)
ff(1,1,2)
ff(1,1,9)

pf(1,c(6,1,7,1),w)
pf(0,c(6,1,7,1),w)

pLabel(c(1,1,1,1),w)
pLabel(c(6,0,2,1),w)
pLabel(c(9,7,7,9),w)
pLabel(c(4,0,2,0),w)
pLabel(c(4,0,2,1),w)

#--- optimization:

modelError(w)

# optimization methods in R: http://cran.r-project.org/web/views/Optimization.html
# optimization in general: http://en.wikipedia.org/wiki/Optimization_(mathematics)

(o <- optim(w, modelError))

w <- o$par

pLabel(c(1,1,1,1),w)
pLabel(c(6,0,2,1),w)
pLabel(c(9,7,7,9),w)
pLabel(c(4,0,2,0),w)
pLabel(c(4,0,2,1),w)

# additional simulated annealing run:

w <- resetW(1)

# BUG FIX: the SANN result was printed but never assigned, so the
# `w <- o$par` below silently reused the Nelder-Mead result from above
(o <- optim(w, modelError, method="SANN"))

w <- o$par

pLabel(c(1,1,1,1),w)
pLabel(c(6,0,2,1),w)
pLabel(c(9,7,7,9),w)
pLabel(c(4,0,2,0),w)
pLabel(c(4,0,2,1),w)

Revision: 47540
at June 9, 2011 16:44 by mjaniec


Initial Code
# Older revision: 7 learning cases, 4 features, binary label in column 1.
featuresNum <- 4
learningNum <- 7

# cases; column 1 is the label (1/0), columns 2..5 the feature values
xf <- matrix(c(1, 6, 1, 7, 1,
               1, 2, 1, 7, 1,
               0, 6, 0, 2, 2,
               1, 1, 0, 1, 1,
               0, 5, 2, 1, 1,
               0, 4, 0, 2, 0,
               1, 6, 1, 7, 2),
             nrow = learningNum, ncol = featuresNum + 1, byrow = TRUE)

# feature-function weights, initialised to 1
w <- matrix(1, 1, featuresNum)

#---

# Example feature function: empirical frequency of label l among the
# learning cases whose feature f equals x (0 if the value never occurs).
ff <- function(l,f,x) {

	col <- f + 1  # shift past the label column of xf

	matches_value <- which(xf[, col] == x)
	matches_both  <- intersect(which(xf[, 1] == l), matches_value)

	denom <- length(matches_value)

	if (denom != 0) length(matches_both) / denom else 0

}

# Numerator of the probability function: exp of the weighted sum of
# feature-function values for label l and feature vector x.
nompf <- function(l,x,w) {

	weighted <- vapply(
		seq_len(featuresNum),
		function(i) w[i] * ff(l, i, x[i]),
		numeric(1)
	)

	exp(sum(weighted))

}

# Denominator of the probability function: the numerator summed over the
# two labels (1 and 0).
sumpf <- function(x,w) {

	sum(vapply(c(1, 0), function(lbl) nompf(lbl, x, w), numeric(1)))

}

# Probability of label l for feature vector x under weights w.
pf <- function(l,x,w) {

	numerator <- nompf(l, x, w)

	numerator / sumpf(x, w)

}

# Sum of squared prediction errors over the learning cases (objective for
# optim()); depends only on the weight vector w.
modelError <- function(w) {

	predicted <- vapply(
		seq_len(learningNum),
		function(lc) pf(1, xf[lc, 2:(featuresNum + 1)], w),
		numeric(1)
	)

	sum((predicted - xf[, 1])^2)

}

#--- sanity checks of the feature and probability functions

ff(1,1,6)
ff(1,1,2)
ff(1,1,9)

pf(1,c(6,1,7,1),w)
pf(0,c(6,1,7,1),w)

pf(1,c(1,1,1,1),w)
pf(1,c(6,0,2,1),w)

#--- optimization: fit the weights by minimizing the squared error

# optimization methods in R: http://cran.r-project.org/web/views/Optimization.html
# optimization in general: http://en.wikipedia.org/wiki/Optimization_(mathematics)
o <- optim(c(1,1,1,1), modelError)

o

w <- o$par

pf(1,c(1,1,1,1),w)
pf(1,c(6,0,2,1),w)

Initial URL
http://mjaniec.blogspot.com

Initial Description
partly inspired by: http://cseweb.ucsd.edu/~elkan/250B/cikmtutorial.pdf

Initial Title
Optimization of log linear model with one feature function - example

Initial Tags

                                

Initial Language
R