[关闭]
@HaomingJiang 2018-03-01T12:12:26.000000Z 字数 23568 阅读 1230

HW 3

Haoming Jiang


Ex 3.1

a

3.1.1.png-3.4kB

  1. > #########################################################################
  2. > ### EXERCISE 3.1
  3. > #########################################################################
  4. > # baseball.dat = entire data set
  5. > # baseball.sub = matrix of all predictors
  6. > # salary.log = response, log salary
  7. > # n = number of observations in the data set
  8. > # m = number of predictors in the data set
  9. > # num.starts = number of random starts
  10. > # runs = matrix containing the random starts where each row is a
  11. > # vector of the parameters included for the model
  12. > # (1 = included, 0 = omitted)
  13. > # runs.aic = AIC values for the best model found for each
  14. > # of the random starts
  15. > # itr = number of iterations for each random start
  16. > #########################################################################
  17. >
  18. > ## INITIAL VALUES
  19. > baseball.dat = read.table("baseball.dat",header=TRUE)
  20. > baseball.dat$freeagent = factor(baseball.dat$freeagent)
  21. > baseball.dat$arbitration = factor(baseball.dat$arbitration)
  22. > baseball.sub = baseball.dat[,-1]
  23. > salary.log = log(baseball.dat$salary)
  24. > n = length(salary.log)
  25. > m = length(baseball.sub[1,])
  26. > num.starts = 5
  27. > runs = matrix(0,num.starts,m)
  28. > itr = 15
  29. > runs.aic = matrix(0,num.starts,itr)
  30. >
  31. > # INITIALIZES STARTING RUNS
  32. > set.seed(19676)
  33. > for(i in 1:num.starts){runs[i,] = rbinom(m,1,.5)}
  34. >
  35. > ## MAIN
  36. > for(k in 1:num.starts){
  37. + run.current = runs[k,]
  38. +
  39. + # ITERATES EACH RANDOM START
  40. + for(j in 1:itr){
  41. + run.vars = baseball.sub[,run.current==1]
  42. + g = lm(salary.log~.,run.vars)
  43. + run.aic = extractAIC(g)[2]
  44. + run.next = run.current
  45. +
  46. + # TESTS ALL MODELS IN THE 1-NEIGHBORHOOD AND SELECTS THE
  47. + # MODEL WITH THE LOWEST AIC
  48. + for(i in sample(m)){
  49. + run.step = run.current
  50. + run.step[i] = !run.current[i]
  51. + run.vars = baseball.sub[,run.step==1]
  52. + g = lm(salary.log~.,run.vars)
  53. + run.step.aic = extractAIC(g)[2]
  54. + if(run.step.aic < run.aic){
  55. + run.next = run.step
  56. + run.aic = run.step.aic
  57. + break
  58. + }
  59. + }
  60. + run.current = run.next
  61. + runs.aic[k,j]=run.aic
  62. + }
  63. + runs[k,] = run.current
  64. + }
  65. >
  66. > ## OUTPUT
  67. > runs # LISTS OF PREDICTORS
  68. [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [,11] [,12] [,13] [,14] [,15] [,16]
  69. [1,] 1 0 0 1 0 0 1 0 1 1 0 0 1 1 1 0
  70. [2,] 1 0 1 0 0 1 0 1 0 1 1 0 1 1 1 1
  71. [3,] 0 0 1 0 0 1 0 1 0 1 0 0 1 1 1 0
  72. [4,] 1 1 1 0 0 1 0 1 0 1 0 0 1 1 1 1
  73. [5,] 1 0 1 0 0 0 0 1 0 1 0 0 1 1 1 1
  74. [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25] [,26] [,27]
  75. [1,] 0 1 1 0 0 0 0 0 1 0 1
  76. [2,] 1 1 0 0 1 0 0 1 0 1 1
  77. [3,] 0 0 0 1 1 1 0 0 1 1 0
  78. [4,] 0 0 0 0 0 1 1 1 1 1 0
  79. [5,] 0 0 0 0 0 0 0 1 1 0 1
  80. > runs.aic # AIC VALUES
  81. [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8]
  82. [1,] -217.9065 -217.9935 -219.8484 -220.1619 -221.0135 -222.6077 -223.7776 -223.9698
  83. [2,] -217.2085 -218.3258 -219.5092 -219.7432 -221.7374 -223.6676 -398.4278 -399.6693
  84. [3,] -403.9695 -405.3088 -409.3507 -410.7968 -411.9632 -412.6045 -414.2432 -416.2141
  85. [4,] -124.5394 -127.0578 -127.5083 -400.2682 -401.1612 -401.6403 -401.6661 -402.5395
  86. [5,] -135.9155 -137.7239 -407.5777 -409.1986 -410.8303 -412.4298 -412.6726 -413.9250
  87. [,9] [,10] [,11] [,12] [,13] [,14] [,15]
  88. [1,] -225.8663 -232.0466 -233.5325 -400.9725 -402.8854 -403.2838 -404.8729
  89. [2,] -409.1566 -409.3218 -409.6924 -410.1146 -410.6694 -411.0543 -412.0218
  90. [3,] -416.5069 -416.5069 -416.5069 -416.5069 -416.5069 -416.5069 -416.5069
  91. [4,] -407.9254 -407.9608 -410.2451 -410.6919 -410.8825 -411.9445 -413.5147
  92. [5,] -414.0846 -414.2130 -416.1533 -416.2576 -417.8858 -417.8858 -417.8858
  93. >
  94. > ##PLOT
  95. > plot(1:(itr*num.starts),-c(t(runs.aic)),xlab="Cumulative Iterations",
  96. + ylab="Negative AIC",ylim=c(360,420),type="n")
  97. > for(i in 1:num.starts) {
  98. + lines((i-1)*itr+(1:itr),-runs.aic[i,]) }

b

It takes a longer time, and the AIC values are much greater than the previous ones, because often no downhill step can be found within the 2-neighborhood. (Note from review: the indexing `all2combn[1][i]` / `all2combn[2][i]` in the code below should probably be the matrix indexing `all2combn[1,i]` / `all2combn[2,i]`; as written the 2-neighborhood is not actually searched, which is consistent with the AIC never improving after the first iteration — verify by rerunning.)
3.1.2.png-2.9kB

  1. > #########################################################################
  2. > ### EXERCISE 3.1
  3. > #########################################################################
  4. > # baseball.dat = entire data set
  5. > # baseball.sub = matrix of all predictors
  6. > # salary.log = response, log salary
  7. > # n = number of observations in the data set
  8. > # m = number of predictors in the data set
  9. > # num.starts = number of random starts
  10. > # runs = matrix containing the random starts where each row is a
  11. > # vector of the parameters included for the model
  12. > # (1 = included, 0 = omitted)
  13. > # runs.aic = AIC values for the best model found for each
  14. > # of the random starts
  15. > # itr = number of iterations for each random start
  16. > #########################################################################
  17. >
  18. > ## INITIAL VALUES
  19. > baseball.dat = read.table("baseball.dat",header=TRUE)
  20. > baseball.dat$freeagent = factor(baseball.dat$freeagent)
  21. > baseball.dat$arbitration = factor(baseball.dat$arbitration)
  22. > baseball.sub = baseball.dat[,-1]
  23. > salary.log = log(baseball.dat$salary)
  24. > n = length(salary.log)
  25. > m = length(baseball.sub[1,])
  26. > num.starts = 5
  27. > runs = matrix(0,num.starts,m)
  28. > itr = 15
  29. > runs.aic = matrix(0,num.starts,itr)
  30. >
  31. > # INITIALIZES STARTING RUNS
  32. > set.seed(19676)
  33. > for(i in 1:num.starts){runs[i,] = rbinom(m,1,.5)}
  34. >
  35. > all2combn = combn(m,2)
  36. >
  37. > ## MAIN
  38. > for(k in 1:num.starts){
  39. + run.current = runs[k,]
  40. +
  41. + # ITERATES EACH RANDOM START
  42. + for(j in 1:itr){
  43. + run.vars = baseball.sub[,run.current==1]
  44. + g = lm(salary.log~.,run.vars)
  45. + run.aic = extractAIC(g)[2]
  46. + run.next = run.current
  47. +
  48. + # TESTS ALL MODELS IN THE 1-NEIGHBORHOOD AND SELECTS THE
  49. + # MODEL WITH THE LOWEST AIC
  50. + for(i in sample(m*(m-1)/2)){
  51. + run.step = run.current
  52. + run.step[all2combn[1][i]] = !run.current[all2combn[1][i]]
  53. + run.step[all2combn[2][i]] = !run.current[all2combn[2][i]]
  54. + run.vars = baseball.sub[,run.step==1]
  55. + g = lm(salary.log~.,run.vars)
  56. + run.step.aic = extractAIC(g)[2]
  57. + if(run.step.aic < run.aic){
  58. + run.next = run.step
  59. + run.aic = run.step.aic
  60. + break
  61. + }
  62. + }
  63. + run.current = run.next
  64. + runs.aic[k,j]=run.aic
  65. + }
  66. + runs[k,] = run.current
  67. + }
  68. >
  69. > ## OUTPUT
  70. > runs # LISTS OF PREDICTORS
  71. [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [,11] [,12] [,13] [,14] [,15] [,16]
  72. [1,] 1 0 0 1 1 1 0 0 0 0 0 1 1 0 0 0
  73. [2,] 1 1 0 0 0 1 1 1 1 1 1 0 1 0 0 1
  74. [3,] 1 0 0 0 1 0 0 1 0 1 1 0 1 1 1 1
  75. [4,] 1 0 1 0 0 1 1 0 1 1 0 0 0 1 0 1
  76. [5,] 1 1 1 1 1 0 1 1 1 1 0 1 0 1 1 0
  77. [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25] [,26] [,27]
  78. [1,] 0 1 1 0 1 1 0 0 1 0 0
  79. [2,] 1 1 0 1 0 0 0 1 1 1 1
  80. [3,] 0 0 0 0 0 1 0 0 1 0 0
  81. [4,] 0 1 1 0 1 1 1 1 0 0 0
  82. [5,] 0 0 0 1 0 1 1 1 1 0 1
  83. > runs.aic # AIC VALUES
  84. [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8]
  85. [1,] -217.2883 -217.2883 -217.2883 -217.2883 -217.2883 -217.2883 -217.2883 -217.2883
  86. [2,] -216.9405 -216.9405 -216.9405 -216.9405 -216.9405 -216.9405 -216.9405 -216.9405
  87. [3,] -400.3062 -400.3062 -400.3062 -400.3062 -400.3062 -400.3062 -400.3062 -400.3062
  88. [4,] -122.8772 -122.8772 -122.8772 -122.8772 -122.8772 -122.8772 -122.8772 -122.8772
  89. [5,] -136.5481 -136.5481 -136.5481 -136.5481 -136.5481 -136.5481 -136.5481 -136.5481
  90. [,9] [,10] [,11] [,12] [,13] [,14] [,15]
  91. [1,] -217.2883 -217.2883 -217.2883 -217.2883 -217.2883 -217.2883 -217.2883
  92. [2,] -216.9405 -216.9405 -216.9405 -216.9405 -216.9405 -216.9405 -216.9405
  93. [3,] -400.3062 -400.3062 -400.3062 -400.3062 -400.3062 -400.3062 -400.3062
  94. [4,] -122.8772 -122.8772 -122.8772 -122.8772 -122.8772 -122.8772 -122.8772
  95. [5,] -136.5481 -136.5481 -136.5481 -136.5481 -136.5481 -136.5481 -136.5481
  96. >
  97. > ##PLOT
  98. > plot(1:(itr*num.starts),-c(t(runs.aic)),xlab="Cumulative Iterations",
  99. + ylab="Negative AIC",ylim=c(360,420),type="n")
  100. > for(i in 1:num.starts) {
  101. + lines((i-1)*itr+(1:itr),-runs.aic[i,]) }

Ex 3.8

We can simply run k-means, which finds a local minimum.
The sizes of the clusters are 67, 62, and 49.
The following is a visualization of the clustering in 2D space via PCA.
3.8.png-4.3kB

  1. > wine.dat = read.table("wine.dat",header=TRUE)
  2. > cl <- kmeans(wine.dat, 3)
  3. > print(cl$size)
  4. [1] 49 67 62
  5. >
  6. > # plot in 2D via PCA
  7. > pcafit <- princomp(wine.dat)
  8. > plot(pcafit$scores[,1:2], col = cl$cluster)

Ex 4.1

a&b

The M-step is the same. But for the E-step, we use $\hat{x}_2 = x_2 + x_4\,\frac{p_I^2 + 2p_Ip_T}{p_I^2 + 2p_Ip_T + p_T^2}$ and $\hat{x}_3 = x_3 + x_4\,\frac{p_T^2}{p_I^2 + 2p_Ip_T + p_T^2}$ to estimate the expected genotype frequencies, instead of $x_2$ and $x_3$.
Result:

  1. # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  2. [1] 0.03606708 0.19579915 0.76813377
  1. > #########################################################################
  2. > # x = observed phenotype counts (carbonaria, insularia, typica, i or t)
  3. > # n = expected genotype frequencies (CC, CI, CT, II, IT, TT)
  4. > # p = allele probabilities (carbonaria, insularia, typica)
  5. > # itr = number of iterations
  6. > # allele.e = computes expected genotype frequencies
  7. > # allele.m = computes allele probabilities
  8. > #########################################################################
  9. >
  10. > ## INITIAL VALUES
  11. > x = c(85, 196, 341, 578)
  12. > n = rep(0,6)
  13. > p = rep(1/3,3)
  14. > itr = 40
  15. >
  16. > ## EXPECTATION AND MAXIMIZATION FUNCTIONS
  17. > allele.e = function(x,p){
  18. + n.cc = (x[1]*(p[1]^2))/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  19. + n.ci = (2*x[1]*p[1]*p[2])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  20. + n.ct = (2*x[1]*p[1]*p[3])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  21. + x2hat = x[2]+x[4]*((p[2]^2)+2*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  22. + x3hat = x[3]+x[4]*(p[3]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  23. + n.ii = (x2hat*(p[2]^2))/((p[2]^2)+2*p[2]*p[3])
  24. + n.it = (2*x2hat*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3])
  25. + n = c(n.cc,n.ci,n.ct,n.ii,n.it,x3hat)
  26. + return(n)
  27. + }
  28. >
  29. > allele.m = function(x,n){
  30. + p.c = (2*n[1]+n[2]+n[3])/(2*sum(x))
  31. + p.i = (2*n[4]+n[5]+n[2])/(2*sum(x))
  32. + p.t = (2*n[6]+n[3]+n[5])/(2*sum(x))
  33. + p = c(p.c,p.i,p.t)
  34. + return(p)
  35. + }
  36. >
  37. > ## MAIN
  38. > for(i in 1:itr){
  39. + n = allele.e(x,p)
  40. + p = allele.m(x,n)
  41. + }
  42. >
  43. > ## OUTPUT
  44. > p # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  45. [1] 0.03606708 0.19579915 0.76813377

c

Result:

  1. > ## OUTPUT
  2. > sd.hat # STANDARD ERROR ESTIMATES (pc, pi, pt)
  3. [1] 0.003840308 0.011077418 0.011530716
  4. > cor.hat # CORRELATION ESTIMATES ({pc,pi}, {pc,pt}, {pi,pt})
  5. [1] -0.03341484 -0.30094901 -0.94955901

Code:

  1. > #########################################################################
  2. > # x = observed phenotype counts (carbonaria, insularia, typica)
  3. > # n = expected genotype frequencies (CC, CI, CT, II, IT, TT)
  4. > # p = allele probabilities (carbonaria, insularia, typica)
  5. > # p.em = em algorithm estimates
  6. > # n.em = em algorithm estimates
  7. > # itr = number of iterations
  8. > # allele.e = computes expected genotype frequencies
  9. > # allele.m = computes allele probabilities
  10. > #########################################################################
  11. >
  12. > ## INITIAL VALUES
  13. > x = c(85, 196, 341, 578)
  14. > n = rep(0,6)
  15. > itr = 40
  16. > p = c(0.04, 0.19, 0.77)
  17. > p.em = p
  18. > theta = matrix(0,3,3)
  19. > psi = rep(0,3)
  20. > r = matrix(0,3,3)
  21. >
  22. > ## EXPECTATION AND MAXIMIZATION FUNCTIONS
  23. > allele.e = function(x,p){
  24. + n.cc = (x[1]*(p[1]^2))/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  25. + n.ci = (2*x[1]*p[1]*p[2])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  26. + n.ct = (2*x[1]*p[1]*p[3])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  27. + x2hat = x[2]+x[4]*((p[2]^2)+2*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  28. + x3hat = x[3]+x[4]*(p[3]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  29. + n.ii = (x2hat*(p[2]^2))/((p[2]^2)+2*p[2]*p[3])
  30. + n.it = (2*x2hat*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3])
  31. + n = c(n.cc,n.ci,n.ct,n.ii,n.it,x3hat)
  32. + return(n)
  33. + }
  34. >
  35. > allele.m = function(x,n){
  36. + p.c = (2*n[1]+n[2]+n[3])/(2*sum(x))
  37. + p.i = (2*n[4]+n[5]+n[2])/(2*sum(x))
  38. + p.t = (2*n[6]+n[3]+n[5])/(2*sum(x))
  39. + p = c(p.c,p.i,p.t)
  40. + return(p)
  41. + }
  42. >
  43. > ## COMPUTES EM ALGORITHM ESTIMATES
  44. > for(i in 1:itr){
  45. + n.em = allele.e(x,p.em)
  46. + p.em = allele.m(x,n.em)
  47. + }
  48. >
  49. > ## INTIALIZES THETA
  50. > for(j in 1:length(p)){
  51. + theta[,j] = p.em
  52. + theta[j,j] = p[j]
  53. + }
  54. >
  55. > ## MAIN
  56. > for(t in 1:5){
  57. + n = allele.e(x,p)
  58. + p.hat = allele.m(x,n)
  59. + for(j in 1:length(p)){
  60. + theta[j,j] = p.hat[j]
  61. + n = allele.e(x,theta[,j])
  62. + psi = allele.m(x,n)
  63. + for(i in 1:length(p)){
  64. + r[i,j] = (psi[i]-p.em[i])/(theta[j,j]-p.em[j])
  65. + }
  66. + }
  67. + p = p.hat
  68. + }
  69. >
  70. > ## COMPLETE INFORMATION
  71. > iy.hat=matrix(0,2,2)
  72. > iy.hat[1,1] = ((2*n.em[1]+n.em[2]+n.em[3])/(p.em[1]^2) +
  73. + (2*n.em[6]+n.em[3]+n.em[5])/(p.em[3]^2))
  74. > iy.hat[2,2] = ((2*n.em[4]+n.em[5]+n.em[2])/(p.em[2]^2) +
  75. + (2*n.em[6]+n.em[3]+n.em[5])/(p.em[3]^2))
  76. > iy.hat[1,2] = iy.hat[2,1] = (2*n.em[6]+n.em[3]+n.em[5])/(p.em[3]^2)
  77. >
  78. > ## COMPUTES STANDARD ERRORS AND CORRELATIONS
  79. > var.hat = solve(iy.hat)%*%(diag(2)+t(r[-3,-3])%*%solve(diag(2)-t(r[-3,-3])))
  80. > sd.hat = c(sqrt(var.hat[1,1]),sqrt(var.hat[2,2]),sqrt(sum(var.hat)))
  81. > cor.hat = c(var.hat[1,2]/(sd.hat[1]*sd.hat[2]),
  82. + (-var.hat[1,1]-var.hat[1,2])/(sd.hat[1]*sd.hat[3]),
  83. + (-var.hat[2,2]-var.hat[1,2])/(sd.hat[2]*sd.hat[3]))
  84. >
  85. > ## OUTPUT
  86. > sd.hat # STANDARD ERROR ESTIMATES (pc, pi, pt)
  87. [1] 0.003840308 0.011077418 0.011530716
  88. > cor.hat # CORRELATION ESTIMATES ({pc,pi}, {pc,pt}, {pi,pt})
  89. [1] -0.03341484 -0.30094901 -0.94955901

d

Result:

  1. > ## OUTPUT
  2. > theta[,1] # EM ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  3. [1] 0.03606708 0.19579915 0.76813377
  4. > sd.hat # STANDARD ERROR ESTIMATES (p.c, p.i, p.t)
  5. [1] 0.003864236 0.012547943 0.012905975
  6. > cor.hat # CORRELATION ESTIMATES ({p.c,p.i}, {p.c,p.t}, {p.i,p.t})
  7. [1] -0.06000425 -0.24107484 -0.95429228

Code:

  1. > #########################################################################
  2. > # x = observed phenotype counts (carbonaria, insularia, typica)
  3. > # n = expected genotype frequencies (CC, CI, CT, II, IT, TT)
  4. > # p = allele probabilities (carbonaria, insularia, typica)
  5. > # itr = number of iterations
  6. > # theta = allele probabilities for psuedo-data
  7. > # allele.e = computes expected genotype frequencies
  8. > # allele.m = computes allele probabilities
  9. > #########################################################################
  10. >
  11. > ## INITIAL VALUES
  12. > x = c(85, 196, 341,578)
  13. > n = rep(0,6)
  14. > p = rep(1/3,3)
  15. > itr = 40
  16. > theta = matrix(0,3,10000)
  17. > set.seed(0)
  18. >
  19. > ## EXPECTATION AND MAXIMIZATION FUNCTIONS
  20. > allele.e = function(x,p){
  21. + n.cc = (x[1]*(p[1]^2))/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  22. + n.ci = (2*x[1]*p[1]*p[2])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  23. + n.ct = (2*x[1]*p[1]*p[3])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  24. + x2hat = x[2]+x[4]*((p[2]^2)+2*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  25. + x3hat = x[3]+x[4]*(p[3]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  26. + n.ii = (x2hat*(p[2]^2))/((p[2]^2)+2*p[2]*p[3])
  27. + n.it = (2*x2hat*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3])
  28. + n = c(n.cc,n.ci,n.ct,n.ii,n.it,x3hat)
  29. + return(n)
  30. + }
  31. > allele.m = function(x,n){
  32. + p.c = (2*n[1]+n[2]+n[3])/(2*sum(x))
  33. + p.i = (2*n[4]+n[5]+n[2])/(2*sum(x))
  34. + p.t = (2*n[6]+n[3]+n[5])/(2*sum(x))
  35. + p = c(p.c,p.i,p.t)
  36. + return(p)
  37. + }
  38. >
  39. > ## MAIN
  40. > for(i in 1:itr){
  41. + n = allele.e(x,p)
  42. + p = allele.m(x,n)
  43. + }
  44. > theta[,1] = p
  45. > for(j in 2:10000){
  46. + n.c = rbinom(1, sum(x), x[1]/sum(x))
  47. + n.i = rbinom(1, sum(x) - n.c, x[2]/(sum(x)-x[1]))
  48. + n.t = rbinom(1, sum(x) - n.c - n.i, x[3]/(sum(x)-x[1]-x[2]))
  49. + n.u = sum(x) - n.c - n.i - n.t
  50. + x.new = c(n.c, n.i, n.t, n.u)
  51. + n = rep(0,6)
  52. + p = rep(1/3,3)
  53. + for(i in 1:itr){
  54. + n = allele.e(x.new,p)
  55. + p = allele.m(x.new,n)
  56. + }
  57. + theta[,j] = p
  58. + }
  59. >
  60. > sd.hat = c(sd(theta[1,]), sd(theta[2,]), sd(theta[3,]))
  61. > cor.hat = c(cor(theta[1,],theta[2,]), cor(theta[1,],theta[3,]),
  62. + cor(theta[2,],theta[3,]))
  63. >
  64. > ## OUTPUT
  65. > theta[,1] # EM ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  66. [1] 0.03606708 0.19579915 0.76813377
  67. > sd.hat # STANDARD ERROR ESTIMATES (p.c, p.i, p.t)
  68. [1] 0.003864236 0.012547943 0.012905975
  69. > cor.hat # CORRELATION ESTIMATES ({p.c,p.i}, {p.c,p.t}, {p.i,p.t})
  70. [1] -0.06000425 -0.24107484 -0.95429228

e

Output:

  1. > ## OUTPUT
  2. > p # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  3. [1] 0.03596147 0.19582060 0.76821793

41e.png-4.2kB

Code:

  1. ## INITIAL VALUES
  2. x = c(85, 196, 341, 578)
  3. n = rep(0,6)
  4. p = rep(1/3,3)
  5. itr = 40
  6. prob.values = matrix(0,3,itr+1)
  7. prob.values[,1] = p
  8. alpha.default = 2
  9. ## FUNCTIONS
  10. allele.e = function(x,p){
  11. n.cc = (x[1]*(p[1]^2))/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  12. n.ci = (2*x[1]*p[1]*p[2])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  13. n.ct = (2*x[1]*p[1]*p[3])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  14. x2hat = x[2]+x[4]*((p[2]^2)+2*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  15. x3hat = x[3]+x[4]*(p[3]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  16. n.ii = (x2hat*(p[2]^2))/((p[2]^2)+2*p[2]*p[3])
  17. n.it = (2*x2hat*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3])
  18. n = c(n.cc,n.ci,n.ct,n.ii,n.it,x3hat)
  19. return(n)
  20. }
  21. allele.l = function(x,p){
  22. l = ( x[1]*log(2*p[1] - p[1]^2) + x[2]*log(p[2]^2 + 2*p[2]*p[3]) +
  23. 2*x[3]*log(p[3]) + x[4]*log(p[2]^2 + 2*p[2]*p[3]+p[3]^2) )
  24. return(l)
  25. }
  26. Q.prime = function(n,p){
  27. da = (2*n[1]+n[2]+n[3])/(p[1]) - (2*n[6]+n[3]+n[5])/(p[3])
  28. db = (2*n[4]+n[5]+n[2])/(p[2]) - (2*n[6]+n[3]+n[5])/(p[3])
  29. dQ = c(da,db)
  30. return(dQ)
  31. }
  32. Q.2prime = function(n,p){
  33. da2 = -(2*n[1]+n[2]+n[3])/(p[1]^2) - (2*n[6]+n[3]+n[5])/(p[3]^2)
  34. 51
  35. db2 = -(2*n[4]+n[5]+n[2])/(p[2]^2) - (2*n[6]+n[3]+n[5])/(p[3]^2)
  36. dab = -(2*n[6]+n[3]+n[5])/(p[3]^2)
  37. d2Q = matrix(c(da2,dab,dab,db2), nrow=2, byrow=TRUE)
  38. return(d2Q)
  39. }
  40. ## MAIN
  41. l.old = allele.l(x,p)
  42. for(i in 1:itr){
  43. alpha = alpha.default
  44. n = allele.e(x,p)
  45. p.new = p[1:2] - alpha*solve(Q.2prime(n,p))%*%Q.prime(n,p)
  46. p.new[3] = 1 - p.new[1] - p.new[2]
  47. if(p.new > 0 && p.new < 1){l.new = allele.l(x,p.new)}
  48. # REDUCE ALPHA UNTIL A CORRECT STEP IS REACHED
  49. while(p.new < 0 || p.new > 1 || l.new < l.old){
  50. alpha = alpha/2
  51. p.new = p[1:2] - alpha*solve(Q.2prime(n,p))%*%Q.prime(n,p)
  52. p.new[3] = 1 - p.new[1] - p.new[2]
  53. if(p.new > 0 && p.new < 1){l.new = allele.l(x,p.new)}
  54. }
  55. p = p.new
  56. prob.values[,i+1] = p
  57. l.old = l.new
  58. }
  59. ## OUTPUT
  60. p # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  61. ## PLOT OF CONVERGENCE
  62. pb = seq(0.001,0.4,length=100)
  63. c = outer(pb,pb,function(a,b){1-a-b})
  64. pbs = matrix(0,3,10000)
  65. pbs[1,] = rep(pb,100)
  66. pbs[2,] = rep(pb,each=100)
  67. pbs[3,] = as.vector(c)
  68. z = matrix(apply(pbs,2,allele.l,x=x),100,100)
  69. contour(pb,pb,z,nlevels=20)
  70. for(i in 1:itr){
  71. segments(prob.values[1,i],prob.values[2,i],prob.values[1,i+1],
  72. prob.values[2,i+1],lty=2)
  73. }

f

Output:

  1. > ## OUTPUT
  2. > p # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  3. [1] 0.03606708 0.19579915 0.76813377

41f.png-4.3kB

Code:

  1. ## INITIAL VALUES
  2. x = c(85, 196, 341,578)
  3. n = rep(0,6)
  4. p = rep(1/3,3)
  5. itr = 40
  6. prob.values = matrix(0,3,itr+1)
  7. prob.values[,1] = p
  8. alpha.default = 2
  9. ## FUNCTIONS
  10. allele.e = function(x,p){
  11. n.cc = (x[1]*(p[1]^2))/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  12. n.ci = (2*x[1]*p[1]*p[2])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  13. n.ct = (2*x[1]*p[1]*p[3])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  14. x2hat = x[2]+x[4]*((p[2]^2)+2*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  15. x3hat = x[3]+x[4]*(p[3]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  16. n.ii = (x2hat*(p[2]^2))/((p[2]^2)+2*p[2]*p[3])
  17. n.it = (2*x2hat*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3])
  18. n = c(n.cc,n.ci,n.ct,n.ii,n.it,x3hat)
  19. return(n)
  20. }
  21. allele.m = function(x,n){
  22. p.c = (2*n[1]+n[2]+n[3])/(2*sum(x))
  23. p.i = (2*n[4]+n[5]+n[2])/(2*sum(x))
  24. p.t = (2*n[6]+n[3]+n[5])/(2*sum(x))
  25. p = c(p.c,p.i,p.t)
  26. return(p)
  27. }
  28. allele.l = function(x,p){
  29. l = ( x[1]*log(2*p[1] - p[1]^2) + x[2]*log(p[2]^2 + 2*p[2]*p[3]) +
  30. 2*x[3]*log(p[3]) + 2*x[4]*log(1-p[1]) )
  31. return(l)
  32. }
  33. allele.iy = function(n,p){
  34. iy.hat=matrix(0,2,2)
  35. iy.hat[1,1] = ((2*n[1]+n[2]+n[3])/(p[1]^2) +
  36. (2*n[6]+n[3]+n[5])/(p[3]^2))
  37. iy.hat[2,2] = ((2*n[4]+n[5]+n[2])/(p[2]^2) +
  38. (2*n[6]+n[3]+n[5])/(p[3]^2))
  39. iy.hat[1,2] = iy.hat[2,1] = (2*n[6]+n[3]+n[5])/(p[3]^2)
  40. return(iy.hat)
  41. }
  42. allele.l.2prime = function(x,p){
  43. l.2prime = matrix(0,2,2)
  44. l.2prime[1,1] = ( (-x[1]*(2-2*p[1])^2)/((2*p[1]-p[1]^2)^2) -
  45. 2*x[1]/(2*p[1]-p[1]^2) -
  46. (4*x[2])/((-2*p[1]-p[2]+2)^2) -
  47. 2*x[3]/(p[3]^2)
  48. -2*x[4]/(1-p[1])^2
  49. )
  50. l.2prime[2,2] = ( (-4*x[2]*p[3]^2)/((p[2]^2 + 2*p[2]*p[3])^2) -
  51. 2*x[2]/(p[2]^2 + 2*p[2]*p[3]) -
  52. 2*x[3]/(p[3]^2)
  53. )
  54. l.2prime[1,2] = ((-2*x[2])/((-2*p[1]-p[2]+2)^2) -
  55. 2*x[3]/(p[3]^2))
  56. l.2prime[2,1] = l.2prime[1,2]
  57. return(l.2prime)
  58. }
  59. ## MAIN
  60. l.old = allele.l(x,p)
  61. for(i in 1:itr){
  62. alpha = alpha.default
  63. n = allele.e(x,p)
  64. p.em = allele.m(x,n)
  65. p.new = (p[1:2] - alpha*solve(allele.l.2prime(x,p))%*%
  66. allele.iy(n,p)%*%(p.em[1:2]-p[1:2]))
  67. p.new[3] = 1 - p.new[1] - p.new[2]
  68. if(p.new > 0 && p.new < 1){l.new = allele.l(x,p.new)}
  69. # REDUCE ALPHA UNTIL A CORRECT STEP IS REACHED
  70. while(p.new < 0 || p.new > 1 || l.new < l.old){
  71. alpha = alpha/2
  72. p.new = (p[1:2] - alpha*solve(allele.l.2prime(x,p))%*%
  73. allele.iy(n,p)%*%(p.em[1:2]-p[1:2]))
  74. p.new[3] = 1 - p.new[1] - p.new[2]
  75. if(p.new > 0 && p.new < 1){l.new = allele.l(x,p.new)}
  76. }
  77. p = p.new
  78. prob.values[,i+1] = p
  79. l.old = l.new
  80. }
  81. ## OUTPUT
  82. p # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  83. ## PLOT OF CONVERGENCE
  84. pb = seq(0.001,0.4,length=100)
  85. c = outer(pb,pb,function(a,b){1-a-b})
  86. pbs = matrix(0,3,10000)
  87. pbs[1,] = rep(pb,100)
  88. pbs[2,] = rep(pb,each=100)
  89. pbs[3,] = as.vector(c)
  90. z = matrix(apply(pbs,2,allele.l,x=x),100,100)
  91. contour(pb,pb,z,nlevels=20)
  92. for(i in 1:itr){
  93. segments(prob.values[1,i],prob.values[2,i],prob.values[1,i+1],
  94. prob.values[2,i+1],lty=2)
  95. }

g

Output:

  1. > ## OUTPUT
  2. > p # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  3. [1] 0.03606708 0.19580543 0.76812749

41g.png-4.3kB

Code:

  1. ## INITIAL VALUES
  2. x = c(85, 196, 341, 578)
  3. n = rep(0,6)
  4. p = rep(1/3,3)
  5. itr = 20
  6. m = matrix(0,2,2)
  7. b = matrix(0,2,2)
  8. prob.values = matrix(0,3,itr+1)
  9. prob.values[,1] = p
  10. alpha.default = 2
  11. ## FUNCTIONS
  12. allele.e = function(x,p){
  13. n.cc = (x[1]*(p[1]^2))/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  14. n.ci = (2*x[1]*p[1]*p[2])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  15. n.ct = (2*x[1]*p[1]*p[3])/((p[1]^2)+2*p[1]*p[2]+2*p[1]*p[3])
  16. x2hat = x[2]+x[4]*((p[2]^2)+2*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  17. x3hat = x[3]+x[4]*(p[3]*p[3])/((p[2]^2)+2*p[2]*p[3]+p[3]*p[3])
  18. n.ii = (x2hat*(p[2]^2))/((p[2]^2)+2*p[2]*p[3])
  19. n.it = (2*x2hat*p[2]*p[3])/((p[2]^2)+2*p[2]*p[3])
  20. n = c(n.cc,n.ci,n.ct,n.ii,n.it,x3hat)
  21. return(n)
  22. }
  23. allele.l = function(x,p){
  24. l = ( x[1]*log(2*p[1] - p[1]^2) + x[2]*log(p[2]^2 + 2*p[2]*p[3]) +
  25. 2*x[3]*log(p[3]) + 2*x[4]*log(1-p[1]) )
  26. return(l)
  27. }
  28. Q.prime = function(n,p){
  29. da = (2*n[1]+n[2]+n[3])/(p[1]) - (2*n[6]+n[3]+n[5])/(p[3])
  30. db = (2*n[4]+n[5]+n[2])/(p[2]) - (2*n[6]+n[3]+n[5])/(p[3])
  31. dQ = c(da,db)
  32. return(dQ)
  33. }
  34. Q.2prime = function(n,p){
  35. da2 = -(2*n[1]+n[2]+n[3])/(p[1]^2) - (2*n[6]+n[3]+n[5])/(p[3]^2)
  36. db2 = -(2*n[4]+n[5]+n[2])/(p[2]^2) - (2*n[6]+n[3]+n[5])/(p[3]^2)
  37. dab = -(2*n[6]+n[3]+n[5])/(p[3]^2)
  38. d2Q = matrix(c(da2,dab,dab,db2), nrow=2, byrow=TRUE)
  39. return(d2Q)
  40. }
  41. ## MAIN
  42. l.old = allele.l(x,p)
  43. for(i in 1:itr){
  44. alpha = alpha.default
  45. n = allele.e(x,p)
  46. m = Q.2prime(n,p) - b
  47. p.new = p[1:2] - alpha*solve(m)%*%Q.prime(n,p)
  48. p.new[3] = 1 - p.new[1] - p.new[2]
  49. if(p.new > 0 && p.new < 1){l.new = allele.l(x,p.new)}
  50. # REDUCE ALPHA UNTIL A CORRECT STEP IS REACHED
  51. while(p.new < 0 || p.new > 1 || l.new < l.old){
  52. alpha = alpha/2
  53. p.new = p[1:2] - alpha*solve(m)%*%Q.prime(n,p)
  54. p.new[3] = 1 - p.new[1] - p.new[2]
  55. if(p.new > 0 && p.new < 1){l.new = allele.l(x,p.new)}
  56. }
  57. at = p.new[1:2]-p[1:2]
  58. n = allele.e(x,p.new)
  59. bt = Q.prime(n,p)-Q.prime(n,p.new)
  60. vt = bt - b%*%at
  61. ct = as.numeric(1/(t(vt)%*%at))
  62. b = b + ct*vt%*%t(vt)
  63. p = p.new
  64. prob.values[,i+1] = p
  65. l.old = l.new
  66. }
  67. ## OUTPUT
  68. p # FINAL ESTIMATE FOR ALLELE PROBABILITIES (p.c, p.i, p.t)
  69. ## PLOT OF CONVERGENCE
  70. pb = seq(0.001,0.4,length=100)
  71. c = outer(pb,pb,function(a,b){1-a-b})
  72. pbs = matrix(0,3,10000)
  73. pbs[1,] = rep(pb,100)
  74. pbs[2,] = rep(pb,each=100)
  75. pbs[3,] = as.vector(c)
  76. z = matrix(apply(pbs,2,allele.l,x=x),100,100)
  77. contour(pb,pb,z,nlevels=20)
  78. for(i in 1:itr){
  79. segments(prob.values[1,i],prob.values[2,i],prob.values[1,i+1],
  80. prob.values[2,i+1],lty=2)
  81. }

h

According to the figures for the above methods, we can tell that the Aitken-accelerated EM is the most efficient.

4.5

a

Forward function:
Given $\alpha_1(h) = \pi_h\, e(h, o_1)$ for all $h$. Then for any $t = 2, \dots, n$, we compute
$$\alpha_t(h) = e(h, o_t) \sum_{h'} \alpha_{t-1}(h')\, p(h', h).$$
Backward function:
Given $\beta_n(h) = 1$ for all $h$. Then for any $t = n-1, \dots, 1$, we compute
$$\beta_t(h) = \sum_{h'} p(h, h')\, e(h', o_{t+1})\, \beta_{t+1}(h').$$

b

C

The complete-data log-likelihood is:
$$\log L = \sum_h N_h \log \pi_h \;+\; \sum_{h,h'} N_{hh'} \log p(h, h') \;+\; \sum_{h,o} N_{ho} \log e(h, o).$$

So we can take $N_h$, $N_{hh'}$, and $N_{ho}$ as latent variables. The proposed formulas are the corresponding MLEs obtained by fixing $N_h$, $N_{hh'}$, and $N_{ho}$ at their conditional expectations.

d

Output:

  1. $pi
  2. [1] 0.5 0.5
  3. $p
  4. [,1] [,2]
  5. [1,] 0.5 0.5
  6. [2,] 0.5 0.5
  7. $e
  8. [,1] [,2]
  9. [1,] 0.45 0.55
  10. [2,] 0.45 0.55

Code

  1. x = as.matrix(read.table("coin.dat",header=TRUE))+1
  2. n = 200
  3. pi = c(0.5,0.5)
  4. e = matrix(rep(0.5,4),ncol=2)
  5. p = matrix(rep(0.5,4),ncol=2)
  6. cal_alpha = function(pi,e,p,x)
  7. {
  8. alpha_new = matrix(rep(0,n*2),ncol=2)
  9. alpha_new[1,1] = pi[1]*e[1,x[1]]
  10. alpha_new[1,2] = pi[2]*e[2,x[1]]
  11. for (i in 2:n)
  12. {
  13. alpha_new[i,1] = alpha_new[i-1,1]*p[1,1]*e[1,x[i]] + alpha_new[i-1,2]*p[2,1]*e[1,x[i]]
  14. alpha_new[i,2] = alpha_new[i-1,1]*p[1,2]*e[2,x[i]] + alpha_new[i-1,2]*p[2,2]*e[2,x[i]]
  15. }
  16. return(alpha_new)
  17. }
  18. cal_beta = function(pi,e,p,x)
  19. {
  20. beta_new = matrix(rep(0,n*2),ncol=2)
  21. beta_new[n,1] = 1
  22. beta_new[n,2] = 1
  23. for (i in (n-1):1)
  24. {
  25. beta_new[i,1] = p[1,1]*e[1,x[i+1]]*beta_new[i+1,1] + p[1,2]*e[2,x[i+1]]*beta_new[i+1,2]
  26. beta_new[i,2] = p[2,1]*e[1,x[i+1]]*beta_new[i+1,1] + p[2,2]*e[2,x[i+1]]*beta_new[i+1,2]
  27. }
  28. return(beta_new)
  29. }
  30. estep = function(x,alpha,beta,p,e){
  31. Nh = c(0,0)
  32. Nhh = matrix(c(0,0,0,0),ncol=2)
  33. Nho = matrix(c(0,0,0,0),ncol=2)
  34. Nh = c(alpha[1,1]*beta[1,1],alpha[1,2]*beta[1,2])
  35. for(i in 1:2)
  36. for(j in 1:2)
  37. for(ii in 1:(n-1))
  38. Nhh[i,j] = Nhh[i,j]+alpha[ii,i]*p[i,j]*e[j,x[ii+1]]*beta[ii+1,j]
  39. for(i in 1:2)
  40. for(j in 1:2){
  41. for(ii in 1:(n))
  42. if(x[ii] == j)
  43. {
  44. Nho[i,j] = Nho[i,j]+alpha[ii,i]*beta[ii,i]
  45. }
  46. }
  47. return(list(Nh=Nh,Nhh=Nhh,Nho=Nho))
  48. }
  49. mstep = function(Nh,Nhh,Nho){
  50. new_pi = Nh/sum(Nh)
  51. new_p = Nhh/rowSums(Nhh)
  52. new_e = Nho/rowSums(Nho)
  53. return(list(pi=new_pi,p=new_p,e=new_e))
  54. }
  55. iternum = 10
  56. for(iter in 1:iternum)
  57. {
  58. alpha = cal_alpha(pi,e,p,x)
  59. beta = cal_beta(pi,e,p,x)
  60. estep_list = estep(x,alpha,beta,p,e)
  61. mstep_list = mstep(estep_list$Nh,estep_list$Nhh,estep_list$Nho)
  62. pi = mstep_list$pi
  63. p = mstep_list$p
  64. e = mstep_list$e
  65. print(mstep_list)
  66. }
添加新批注
在作者公开此批注前,只有你和作者可见。
回复批注