The R code for Rubinstein's simulation book was written by the R-Studio site (r-studio.ir), and all rights to these programs belong to this site. If you use this code, please cite the source.
Read more: Free download of the solutions to the Rubinstein simulation book exercises
Note that these programs were written by someone fluent in R programming, and there may be thousands of other ways to solve any given problem. So do not rely solely on this code; try other approaches as well.
With wishes of success for the dear statistics students:
# r-studio.ir
# All rights reserved for Afshin Motavali
# Afshin Motavali, statistical software programmer
x <- runif(1, 0, 2); x
rnorm(2)
y <- runif(4); y
set.seed(1234)
# Algorithm 2.3.1 for example: Generating Exponentials. p.35
Nsim <- 10000
U <- runif(Nsim); U
X <- -log(1 - U)
hist(X, freq = F, main = "Exp from Uniform")
plot(X)
# Example: Chi-squared
U <- runif(10000)
X <- -log(U)       # uniform to exponential
Y <- 2*sum(X); Y   # twice the sum gives a chi-squared variable with 2*10^4 degrees of freedom
# Discrete Distributions
# Gamma
n <- runif(10000)
X <- -log(n)           # uniform to exponential
Y <- sum(X)            # sum of the exponentials
lambda = 1
X <- lambda * sum(X)   # rescaled sum gives a Gamma variable with shape 10^4
X
# Alias method ====================================
#install.packages("Runuran")
library("Runuran")
gen <- dau.new(pv = dbinom(0:115, 115, 0.5), from = 0)
x <- ur(gen, 100)

## Alternative approach
distr <- udbinom(size = 100, prob = 0.3)
gen <- daud.new(distr)
x <- ur(gen, 100)

## Algorithm 2.3.4 (Composition Method)
# lambda = 1
mu1 <- 1; mu2 <- 1.2; mu3 <- 4
x1 <- rexp(100, rate = mu1); x2 <- rexp(100, rate = mu2); x3 <- rexp(100, rate = mu3)
F1_x <- 1 - exp(-x1); F2_x <- 1 - exp(-x2); F3_x <- 1 - exp(-x3)
p1 <- .1; p2 <- .3; p3 <- .6
F <- (p1*F1_x) + (p2*F2_x) + (p3*F3_x)

### example1
Y <- rchisq(100, 3)
X_given_y <- rnorm(100, 0, 3/Y)
### example2
Y <- rgamma(100, 20, 1)
X_given_y <- rpois(100, Y)
plot(X_given_y)
hist(X_given_y)
# Algorithm 2.3.6 (Modified Acceptance-Rejection Algorithm)
n <- 100               # number of random draws
Z <- rep(NA, n)        # vector to be filled with random draws
c <- 3                 # rescaling coefficient
for (i in 1:n){
  X <- runif(1, 0, 1)  # X~g(x) majorizing function (uniform from 0 to 1)
  U <- runif(1, 0, 1)  # will be used later to accept with probability r
  r <- dbeta(X, 2, 3) / (c * dunif(X))  # r = f(X)/Cg(X), acceptance probability
  if (U <= r){
    Z[i] <- X
  } else {
    next
  }
}
length(Z)
# Example 2.2
n <- 100
Z <- rep(NA, n)
c <- 2
for (i in 1:n){
  X <- runif(1, 0, 1)  # X~g(x) majorizing function (uniform from 0 to 1)
  U <- runif(1, 0, 1)  # will be used later to accept with probability r
  if (U <= X){
    Z[i] <- X
  } else {
    next
  }
}
# Example 2.3
n <- 100
Z <- rep(NA, n)
c <- 4/pi
for (i in 1:n){
  R <- runif(1, -1, 1)
  U1 <- runif(1, 0, 1)
  U2 <- runif(1, 0, 1)
  X <- 2*(U2 - 1)*R
  fcg <- sqrt(1 - (2*U2 - 1)^2)
  if (U1 <= fcg){
    Z[i] <- X
  } else {
    next
  }
}
# Algorithm 2.4.3 (Sampling from the Gamma(alpha, 1) Distribution)
alpha = 10; N = 100; samp = c()
d = alpha - 1/3; c = 1/sqrt(9*d)
for(i in 1:N){
  repeat{                     # repeat until a proposal is accepted
    Z = rnorm(1)
    U = runif(1)
    V = (1 + c*Z)^3
    h_z = d*log(V) - d*V + d  # log acceptance ratio, up to the 0.5*Z^2 term
    if(Z > -1/c && log(U) < h_z + .5*(Z^2)){
      X = d*V
      break
    }
  }
  samp[i] = X
}
samp
# Algorithm 2.4.4 (Generation of an Erlang Random Variable)
m = 100; lambda = 2
U = runif(m)
Erlang = (-1/lambda)*sum(log(U))
Erlang
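As a quick check, the Erlang(m, lambda) variable above is just a Gamma(m, lambda) variable, so its mean should be m/lambda. A minimal sketch, assuming m = 100 and lambda = 2 from the block above:

# Check: the average of many Erlang draws should be close to m/lambda = 50
mean(replicate(10000, (-1/lambda)*sum(log(runif(m)))))
m/lambda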
# Algorithm 2.4.5 (Generation of a Beta Random Variable)
alpha <- 2; beta <- 3
Y1 <- rgamma(1, alpha, 1); Y2 <- rgamma(1, beta, 1)
X <- Y1/(Y1 + Y2); X  # X ~ Beta(alpha = 2, beta = 3)
# Algorithm 2.4.6 (Generation of a Beta Random Variable with Integer Parameters)
n <- 15; m <- 10
U <- runif(m + n - 1)
indices <- order(U, decreasing = F)
U_m <- U[indices][m]; U_m  # the m-th order statistic, U_m ~ Beta(m, n)
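The idea behind Algorithm 2.4.6 is that the m-th order statistic of m+n-1 independent uniforms follows a Beta(m, n) distribution. A small check, reusing m and n from above and comparing many replications with the Beta(m, n) density:

# Sanity check: many replications of the m-th order statistic vs the Beta(m, n) density
draws <- replicate(10000, sort(runif(m + n - 1))[m])
hist(draws, freq = FALSE, main = "Order statistic vs Beta(m, n)")
curve(dbeta(x, m, n), add = TRUE, col = "red")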
# Algorithm 2.4.7 (Generation of a Bernoulli Random Variable)
# Approach 1: using a built-in package
n = 100; p = .5
#install.packages("LaplacesDemon")
library("LaplacesDemon")
X <- rbern(n, p)  # n draws from Bernoulli(p)
Y <- sum(X); Y    # Y ~ Binomial(n, p)

# Approach 2
U <- c(); X <- c(); n <- 100
for(i in 1:n) {
  U <- runif(1)
  if(U <= p) {X[i] <- 1} else {X[i] <- 0}
}              # X ~ Bernoulli(p)
Y <- sum(X); Y # Y ~ Binomial(n, p)
# Binomial generation using the normal approximation, page 64
n = 100; X = c()
for(i in 1:n){
  p = runif(1)
  Z = rnorm(1)
  X[i] = max(0, floor(n*p - .5 + Z*sqrt(n*p*(1 - p))))
}
X  # X[i] is approximately Bin(n, p), each with its own randomly drawn p

# To enhance efficiency we may want to perform the following approach
n = 100; X = c()
for(i in 1:n){
  p = runif(1)
  Z = rnorm(1)
  Y1 = max(0, floor(n*p - .5 + Z*sqrt(n*p*(1 - p))))
  Y2 = max(0, floor(n*(1 - p) - .5 + Z*sqrt(n*(1 - p)*p)))  # approximates Bin(n, 1-p)
  if(p <= 0.5){X[i] = Y1} else{X[i] = n - Y2}
}
X
# Algorithm 2.4.9 (Generation of a Geometric Random Variable)
p <- .6
Y <- rexp(100, rate = -log(1 - p))
X <- 1 + floor(Y); X  # X ~ Geom(p)
# Algorithm 2.4.10 (Generating a Poisson Random Variable)
a = c(); a[1] <- 1; lambda <- 3
n <- 1
while(a[n] >= exp(-lambda)){  # multiply uniforms until the product drops below exp(-lambda)
  U <- runif(1)
  a[n+1] <- a[n]*U
  n <- n + 1
}
X <- n - 2  # number of uniforms whose running product stayed at or above exp(-lambda)
X           # X ~ Poisson(lambda)
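To see that the product-of-uniforms construction really produces Poisson(lambda) draws, the single draw above can be repeated many times. A minimal sketch; rpois_prod is a helper name introduced here only for this check:

# Sketch: repeat the product-of-uniforms draw and compare the empirical pmf with dpois
rpois_prod <- function(lambda) {
  prod <- 1; k <- 0
  repeat {
    prod <- prod * runif(1)
    if (prod < exp(-lambda)) return(k)  # k uniforms kept the product at or above exp(-lambda)
    k <- k + 1
  }
}
draws <- replicate(10000, rpois_prod(3))
table(draws) / 10000  # empirical pmf
dpois(0:10, 3)        # theoretical pmf for comparison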
# Algorithm 2.5.1: Vector Acceptance-Rejection Method
n = 10
for(i in 1:n){
  U <- runif(n)
  X <- (1 - 2*U)
  R <- crossprod(X)
  if(R <= 1){print(X); break}  # R should satisfy the requirement
}
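In dimension n = 10 the unit ball fills only a tiny fraction of the cube [-1, 1]^10, so the ten attempts above will usually all be rejected. Here is a sketch of a repeat-until-accept version in dimension 3, where the theoretical acceptance probability is pi/6 (ndim is an illustrative name):

# Sketch: repeat-until-accept in dimension 3, with the same uniform-in-cube proposal
ndim <- 3; tries <- 0
repeat {
  tries <- tries + 1
  X <- 1 - 2*runif(ndim)
  if (crossprod(X) <= 1) break  # accepted: X is uniform inside the 3-dimensional unit ball
}
X
1/tries  # crude acceptance indicator for this single run
pi/6     # theoretical acceptance probability in dimension 3 (ball volume over cube volume)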
# Algorithm 2.5.3
n <- 10; U <- c(); Y <- c(); X <- c()
U <- runif(n)
X <- sort(U)   # order statistics of the uniform sample
Y[1] <- X[1]
for(i in 2:n) {
  Y[i] <- X[i] - X[i-1]  # spacings between consecutive order statistics
}
print(Y)
# Algorithm 2.5.4
n <- 5
X <- rnorm(n)
normX <- sqrt(crossprod(X))  # Euclidean norm
U <- runif(1)
R <- U^(1/n)                 # radius for a uniform point in the n-dimensional unit ball
R*X/normX
# Algorithm 2.6.1 (Generating a Homogeneous Poisson Process)
T = c(); U = c(); n = 10; lambda = 2; t = 3
T[1] = 0  # T_0 = 0, the start of the process
for(i in 2:n){
  U[i] = runif(1)
  T[i] = T[i-1] - (1/lambda)*log(U[i])  # add an Exp(lambda) interarrival time
  if(T[i] > t){break} else{next}
}
T
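A rough check of the construction: the number of arrivals of a rate-lambda homogeneous Poisson process in [0, t] is Poisson(lambda*t), so its mean and variance should both be close to lambda*t = 6 here. count_arrivals is a helper name introduced only for this check:

# Sketch: count arrivals in [0, t] over many runs and compare with Poisson(lambda*t)
count_arrivals <- function(lambda, t) {
  time <- 0; count <- 0
  repeat {
    time <- time - (1/lambda)*log(runif(1))  # next arrival time
    if (time > t) return(count)
    count <- count + 1
  }
}
counts <- replicate(10000, count_arrivals(2, 3))
mean(counts)  # should be close to lambda*t = 6
var(counts)   # should also be close to 6 for a Poisson count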
# Example 2.9 Random Walk on the Integers
p = .5; n = 200
X = c(); I = c()
X[1] = 0
sum = 0
for(i in 1:n){
  u = runif(1)
  if(u <= p){I[i] = 1} else {I[i] = 0}
  X[i] = sum
  sum = sum + 2*I[i] - 1
}
plot(X); abline(h = 0)
lines(X, col = "red")
# Algorithm 2.8.1 (First Algorithm for Generating Random Permutations)
n = 10
U = runif(n)
indice = order(U)
O = c()
for(i in 1:n){
  O[i] = (indice[i])
}
O
# Algorithm 2.8.2 (Second Algorithm for Generating Random Permutations)
library("e1071")
library("purrr")
n = 100
p = 1:n; X = c()
for(i in 1:n){
  x = rdunif(1, b = length(p), a = 1)  # pick a position uniformly among the remaining items
  X[i] = p[x]                          # record the chosen item
  p = p[-x]                            # remove it from the pool
}
X
### Procedure (Random Walk Sampler), page 171
n = 10^5; X = matrix(0, n, 2); Z = c(); Y = c()
X[1,] = c(rnorm(2))
for(t in 2:n){
  Z = c(rnorm(1), rnorm(1))
  Y = X[t-1,] + 2*Z
  a = ((Y[1])^2)*((Y[2])^2) + (Y[1])^2 + (Y[2])^2 - 8*Y[1] - 8*Y[2]
  b = ((X[t-1,1])^2)*((X[t-1,2])^2) + (X[t-1,1])^2 + (X[t-1,2])^2 - 8*X[t-1,1] - 8*X[t-1,2]
  d = exp((b - a)/2); alfa = min(d, 1)
  if(runif(1) < alfa){X[t,] = Y} else{X[t,] = X[t-1,]}
}
plot(X[,1], X[,2], col = "green")
hist(X[,1], prob = T, breaks = c(length(X[,1])*0.001), col = "blue")
lines(density(X[,1]), col = 2, lty = 4, lwd = 3)
hist(X[,2], prob = T, breaks = c(length(X[,1])*0.001), col = "blue")
lines(density(X[,2]), col = 2, lty = 4, lwd = 3)
mean(X)  # CMC estimate, or posterior mean over all samples
acf(X[,1], type = c("covariance"), lag.max = 400, plot = TRUE)
# EXAMPLE 6.5 (Example 6.4, continued)
n = 10000
X = matrix(0, n, 2)
X[1,1] = rnorm(1); X[1,2] = rnorm(1)  # initialize the first state
for(t in 2:n){
  a = t %% 2
  if(a == 1){
    z = rnorm(1)
    a1 = 1/(1 + (X[t-1,1])^2)
    X[t,2] = 4*a1 + z*sqrt(a1)  # X2 | X1 = x1 ~ N(4/(1+x1^2), 1/(1+x1^2))
    X[t,1] = X[t-1,1]
  } else if(a == 0){
    z = rnorm(1)
    a1 = 1/(1 + (X[t-1,2])^2)
    X[t,1] = 4*a1 + z*sqrt(a1)  # X1 | X2 = x2 ~ N(4/(1+x2^2), 1/(1+x2^2))
    X[t,2] = X[t-1,2]
  }
}
mean(X[9000:10000,1])
hist(X[9000:10000,1])
plot.ts(X[,1])
plot.ts(X[9000:10000,1])
# Ising model
install.packages("bayess")
library("bayess")
image(1:20, 1:20, isingibbs(10, 20, 20, beta = 0.6))

# Potts model
install.packages("potts")
library("potts")
ncolor <- as.integer(4)
beta <- log(1 + sqrt(ncolor))
theta <- c(rep(0, ncolor), beta)
nrow <- 100
ncol <- 100
x <- matrix(1, nrow = nrow, ncol = ncol)
foo <- packPotts(x, ncolor)
out <- potts(foo, theta, nbatch = 10)
out$batch
image(out$batch)
# EXAMPLE 6.8 Poisson Disruption Problem
install.packages("e1071")
library("e1071")
n = 100
K = 30
a1 = 2; a2 = 3; b1 = 4; b2 = 3; c1 = 2; c2 = 3
eta1 = 3; eta2 = 5
lambda1 = rgamma(1, a1, eta1); lambda2 = rgamma(1, a2, eta2)
X = matrix(0, 1, n)
for(i in 1:K){
  X[i] = rpois(1, lambda1)
}
for(i in (K+1):n){
  X[i] = rpois(1, lambda2)
}
theta.real = c(lambda1, lambda2, eta1, eta2, K)
N = 10000
theta.s = c(1.5, .5, 2, 4, 30)  # starting point: (lambda1, lambda2, eta1, eta2, K)
X1 = matrix(0, N, 5)
theta.post = c(); p = c()
for(j in 1:N){
  k = theta.s[5]
  aa1 = a1 + sum(X[1:k]); aa2 = k + theta.s[3]
  theta.post[1] = rgamma(1, aa1, aa2)                  # lambda1 | rest
  bb1 = a2 + sum(X[(k+1):n]); bb2 = n - k + theta.s[4]
  theta.post[2] = rgamma(1, bb1, bb2)                  # lambda2 | rest
  cc1 = a1 + b1; cc2 = theta.post[1] + c1
  theta.post[3] = rgamma(1, cc1, cc2)                  # eta1 | rest
  dd1 = a2 + b2; dd2 = theta.post[2] + c2
  theta.post[4] = rgamma(1, dd1, dd2)                  # eta2 | rest
  logp = c()
  for(i in 1:(n-1)){
    logp[i] = -i*(theta.post[1] - theta.post[2]) + sum(X[1:i])*log(theta.post[1]/theta.post[2])
  }
  p = exp(logp - max(logp)); p = p/sum(p)              # conditional pmf of K, computed on the log scale
  theta.post[5] = rdiscrete(1, p)                      # K | rest
  X1[j,] = theta.post
  theta.s = theta.post                                 # move the chain to the new state
}
lambda1.post.mean = mean(X1[,1]); lambda1.post.mean
lambda2.post.mean = mean(X1[,2]); lambda2.post.mean
eta1.post.mean = mean(X1[,3]); eta1.post.mean
eta2.post.mean = mean(X1[,4]); eta2.post.mean
K.post.mean = mean(X1[,5]); K.post.mean
plot(X1[,1])  # scatter plot of the lambda1 posterior draws
## EXAMPLE 6.9 Slice Sampler
X = c()
f = function(x){(x*exp(-x))/(1 + x)}
f1 = function(x) if(x >= 0){x/(1 + x)}
f2 = function(x) if(x >= 0){exp(-x)}
X[1] = 1; N = 10^5
for(t in 1:(N-1)){
  u1 = runif(1); u2 = runif(1)
  Y1 = u1*f1(X[t]); Y2 = u2*f2(X[t])
  # the slice {x : f1(x) >= Y1, f2(x) >= Y2} is the interval [Y1/(1-Y1), -log(Y2)]
  left = (u1*f1(X[t]))/(1 - (u1*f1(X[t]))); right = -log(u2*f2(X[t]))
  X[t+1] = runif(1, left, right)
}
hist(X, prob = T)
curve(f, add = TRUE)
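Note that curve(f, add = TRUE) overlays the unnormalized target, so it will not match the probability histogram in height. A small sketch that normalizes f numerically before overlaying it:

# Sketch: overlay the normalized target density (normalizing constant obtained with integrate)
const <- integrate(function(x) x*exp(-x)/(1 + x), lower = 0, upper = Inf)$value
hist(X, prob = TRUE, breaks = 60, main = "Slice sampler output vs normalized target")
curve(x*exp(-x)/(1 + x)/const, from = 0, to = max(X), add = TRUE, col = "red")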
# Reversible Jump Sampler
# EXAMPLE 6.11
library("e1071")
library("mvtnorm")
library("MASS")
beta0 = 1; beta1 = 0.3; beta2 = -0.2; u = c(); y = c()
for(i in 1:101){
  u[i] = (i-1)/20
  y[i] = 1 + 0.3*u[i] - 0.2*(u[i])^2 + rnorm(1)  # data generated from a quadratic model
}
# Algorithm starts here
N = 1000; X = matrix(0, N, 4); p = c(1/3, 1/3, 1/3)
X[1,] = c(2, 1, .5, .4)  # start: c(m, beta0, beta1, beta2), i.e. polynomial degree m = 2
for(k in 2:N){
  m.old = X[k-1,1]; beta.old = X[k-1, 2:(m.old+2)]
  m.new = rdiscrete(1, p, values = 0:2)            # propose a polynomial degree uniformly from {0, 1, 2}
  mu = matrix(0, m.new+1, 1); sigma2 = diag(1, m.new+1, m.new+1)
  beta.new = mvrnorm(1, mu, sigma2)                # propose the coefficients from their N(0, I) prior
  a2 = 0
  for(i1 in 1:101){
    a1 = 0
    for(j1 in 0:m.new){ a1 = a1 + beta.new[j1+1]*(u[i1])^j1 }
    a2 = a2 + (y[i1] - a1)^2                       # residual sum of squares under the proposed model
  }
  b2 = 0
  for(i2 in 1:101){
    b1 = 0
    for(j2 in 0:m.old){ b1 = b1 + beta.old[j2+1]*(u[i2])^j2 }
    b2 = b2 + (y[i2] - b1)^2                       # residual sum of squares under the current model
  }
  rho = exp(-0.5*(a2 - b2))  # likelihood ratio; the N(0, I) prior on the coefficients cancels against the proposal
  alpha = min(rho, 1)
  U <- runif(1)
  if(U <= alpha) {
    X[k,1] = m.new; X[k,2:4] = c(beta.new, rep(0, 3 - length(beta.new)))
  } else {
    X[k,] = X[k-1,]
  }
}
head(X)
plot(u, y)               # the simulated data
plot(X[,1], type = "l")  # trace of the sampled polynomial degree
plot(X[,2], col = "red") # trace of the intercept

# Exercise 2.22
Z = c(); N = 10000
for(i in 1:N){
  U1 = runif(1); X = (5/2)*sqrt(U1)
  U2 = runif(1)
  if(X > 0 && X < 1){a <- 1} else if(X >= 1 && X < 5/2){a <- 1/X}
  if(U2 <= a) {Z[i] = X} else{next}
}
hist(Z)
# Exercise 5.14
N = 2000000
U = runif(N)
X = 4 - log(U)                              # shifted exponential proposal on (4, Inf)
w = (exp(-.5*(X^2))*exp(X - 4))/sqrt(2*pi)  # importance weights: standard normal density / proposal density
ell = mean(w)
re = sd(w)/(sqrt(N)*ell); re
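The quantity being estimated is ell = P(Z > 4) for a standard normal Z, so the importance-sampling estimate can be checked directly against the exact tail probability:

# Check: the estimate should be close to the exact normal tail probability
ell
pnorm(4, lower.tail = FALSE)  # about 3.17e-05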
# Exercise 6.2
N = 5000
sample = c()
sigma = 0.1
X = rnorm(1)*sigma  # initializing point
for(k in 1:N){
  Y = X + sigma*rnorm(1)  # generate the proposal move
  U = runif(1)
  alpha = min(exp(-0.5*((Y-10)^2) + 0.5*((X-10)^2)), 1)
  if(U < alpha){X = Y}  # update X; on rejection the chain stays where it is
  sample[k] = X         # store the current state even when the proposal was rejected
}
plot(sample, xlab = "iteration t", ylab = "State of the chain X_t")
# Exercise 6.4
N = 10^5
lam = 0.2
x = 1  # initial state
sample = c()
for(k in 1:N){
  y = -(1/lam)*log(runif(1))                  # proposal move from the Exp(lam) proposal
  alpha = min(exp((lam-1)*y + (1-lam)*x), 1)  # independence-sampler acceptance ratio
  if(runif(1) < alpha){x = y}                 # use a fresh uniform for the accept/reject step
  sample[k] = x
}
hist(sample, probability = T)
# Exercise 6.5
N = 10^5
lam = 20; x = 1; sample = c()
for(k in 1:N){
  rv = -(1/lam)*log(runif(1))    # exponential magnitude of the step
  rv = sign(runif(1) - 0.5)*rv   # random sign: a double-exponential (Laplace) increment
  Y = x + rv                     # proposal move
  alpha = min(exp(x - Y)*(Y > 0), 1)
  if(runif(1) < alpha){x = Y}    # accept or stay; the state is recorded either way
  sample[k] = x
}
hist(sample)
# Exercise 6.6
x = 0  # initializing state
y = 0
rho = 0.9  # correlation coefficient
N = 10^4
sample = matrix(0, nrow = N, ncol = 2)
for(k in 1:N){
  x = rnorm(1)*sqrt(1 - rho^2) + rho*y
  y = rnorm(1)*sqrt(1 - rho^2) + rho*x
  sample[k,] = c(x, y)
}
plot(sample[,1], sample[,2])
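Since the Gibbs sampler above targets a bivariate normal with correlation rho, a quick sanity check is that the empirical correlation of the output is close to 0.9:

# Check: the empirical correlation of the Gibbs output should be close to rho = 0.9
cor(sample[,1], sample[,2])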
# Exercise 6.20
N = 50000
sample = c()
sigma = 0.1
X = rnorm(1)*sigma
for(k in 1:N){
  Y = X + rnorm(1)*sigma
  U = runif(1)
  if(U < min(1, exp(-0.5*(Y-10)^2 + 0.5*(X-10)^2))){
    X <- Y
  }
  sample[k] <- X
}
plot.ts(sample, xlab = "Iteration t", ylab = "State of the chain X_t")
# Exercise 6.21
# define the objective function S(x)
S = function(x) abs((sin(10*x)^8 + cos(5*x + 1)^5)/(x^2 - x + 1))
x = 0; T = 1; beta = 0.9999; sigma = 0.1; N = 10^3
results = matrix(0, nrow = N, ncol = 2); S_best = 0
for(t in 1:N){
  y = x + rnorm(1)*sigma
  S1 = S(x); S2 = S(y)
  alpha = min(exp((S2 - S1)/T), 1)
  if(runif(1) < alpha){x = y}
  T = beta*T  # cool the temperature
  if(max(S1, S2) > S_best){S_best = max(S1, S2)}
  results[t,] = cbind(t, S_best)
}
plot(results[,1], results[,2])
max(results[,2])
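Because S has many local maxima, a simple way to judge the annealing result is to compare it with a brute-force evaluation of S on a fine grid (S is vectorized, so this is a one-liner):

# Rough check of the annealing result against a grid search
xs <- seq(-3, 3, by = 1e-4)
max(S(xs))            # grid-based approximation of the global maximum
xs[which.max(S(xs))]  # where it is attained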
# Problem 6.1, p. 194
N = 5000
Sample = c()
Sigma = 0.1
X = rnorm(1)*Sigma
for(j in 1:N){
  Y = X + rnorm(1)*Sigma
  if(runif(1) < min(exp(-.5*((Y-10)^2) + .5*((X-10)^2)), 1)){
    X = Y
  }
  Sample[j] = X
}
plot(Sample, type = "l", lwd = 2, col = "blue")
# Problem 6.6
x = 0
y = 0
rho = .9
Sample = matrix(NA, nrow = 10^4, ncol = 2)
for(j in 1:10^4){
  x = rnorm(1)*sqrt(1 - rho^2) + rho*y
  y = rnorm(1)*sqrt(1 - rho^2) + rho*x
  Sample[j,] = c(x, y)
}
plot(Sample[,1], Sample[,2])
Stay tuned for more tutorials from the R-Studio site.
Following this R code for Rubinstein's simulation book, a complete R tutorial in Persian, free of charge and built on the most advanced and well-founded methods, will soon be published on the site. To see the original book, follow the link here.
Stay with us…