Example data: TOEIC scores range from 10 to 990; the published average is 583.7 with a standard deviation of 170.1.
Sources:
http://www.toeic.or.jp/toeic/about/data/data_avelist/data_ave01_04.html
http://www.toeic.or.jp/toeic/about/data/data_avelist/data_dist01_04.html
For a data set D = \{x_1, x_2, \cdots, x_n\}:

Mean:
\bar{x} = \frac{1}{n}\sum_{i=1}^{n} x_i

Variance:
\sigma^2 = \frac{1}{n}\sum_{i=1}^{n} (x_i - \bar{x})^2

Standard deviation:
\sigma = \sqrt{\frac{1}{n}\sum_{i=1}^{n} (x_i - \bar{x})^2}

Mean absolute deviation:
\frac{1}{n}\sum_{i=1}^{n} |x_i - \bar{x}|

Population variance and population standard deviation (N = population size):
\sigma_p^2 = \frac{1}{N}\sum_{i=1}^{N} (x_i - \bar{x})^2, \qquad \sigma_p = \sqrt{\frac{1}{N}\sum_{i=1}^{N} (x_i - \bar{x})^2}
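As a quick check of these definitions, here is a minimal R sketch (not from the slides; the sample values are made up for illustration) that computes the mean, the variance and standard deviation with the 1/n convention used above, and the mean absolute deviation.

# toy data, made up for illustration
x <- c(583, 720, 415, 650, 490, 605, 530, 700)
n <- length(x)
x_bar <- sum(x) / n                   # mean
var_n <- sum((x - x_bar)^2) / n       # variance with the 1/n convention
sd_n  <- sqrt(var_n)                  # standard deviation
mad_n <- sum(abs(x - x_bar)) / n      # mean absolute deviation
c(mean = x_bar, variance = var_n, sd = sd_n, mad = mad_n)
# note: R's built-in var()/sd() divide by n - 1, not n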
probability

An outcome \omega is an element of a sample space \Omega = \{\omega_1, \omega_2, \cdots, \omega_m\}.

Coin toss: \Omega = \{\text{heads}, \text{tails}\}, \omega \in \{\text{heads}, \text{tails}\}; repeated trials give outcomes \omega^{(1)}, \omega^{(2)}, \cdots, \omega^{(n)}.

Die roll: \Omega = \{1, 2, 3, 4, 5, 6\}, with trial outcomes \omega^{(1)}, \omega^{(2)}, \cdots, \omega^{(n)}.

Heights in a population: \Omega = \{\omega_1, \omega_2, \cdots, \omega_{49870000}\}, e.g.
\omega^{(1)} = \omega_{43890298} = 171cm, \quad \omega^{(2)} = \omega_{29184638} = 168cm, \quad \cdots, \quad \omega^{(n)} = \omega_{51398579} = 174cm

Lottery example: ten tickets \omega_1, \omega_2, \cdots, \omega_{10}, so \Omega = \{\omega_1, \omega_2, \omega_3, \cdots, \omega_{10}\}, i.e. \omega \in \Omega = \{ID1, ID2, ID3, \cdots, ID10\}.

A random variable X = X(\omega) assigns a number to each outcome \omega \in \Omega. For example:
X(\omega_1) = 0, X(\omega_2) = 0, \cdots, X(\omega_9) = 0, X(\omega_{10}) = 100

The set \{\omega \in \Omega : X(\omega) \in A\} is an event, abbreviated \{X \in A\}; likewise X(\omega) is abbreviated X.
(Figure: the ten outcomes \omega_1, \cdots, \omega_{10}, with A the set of outcomes where X(\omega) = 100 (here \omega_5 and \omega_9) and A^c the set where X(\omega) = 0.)

The probability of the event is
P_X(A) = P(X \in A) = P(\{\omega \in \Omega : X(\omega) \in A\})
       = \frac{\#(\{\omega \in \Omega : X(\omega) \in A\})}{\#(\Omega)}
       = \frac{\#(\{\omega_5, \omega_9\})}{\#(\Omega)}
       = \frac{2}{10} = 0.2
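A minimal R sketch (my addition, not from the slides) of the same counting argument: ten ticket IDs, a random variable X that pays 100 on two of them, and P_X(A) computed as a ratio of counts.

# sample space: ten tickets
omega <- paste0("ID", 1:10)
# X(omega): tickets ID5 and ID9 pay 100, the rest pay 0 (illustrative assignment)
X <- ifelse(omega %in% c("ID5", "ID9"), 100, 0)
# event A = {omega : X(omega) = 100}
A <- omega[X == 100]
P_A <- length(A) / length(omega)
P_A  # 0.2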
Probability axioms:
P_X(\Omega) = 1
For mutually exclusive events A_1, A_2, \cdots:
P_X(\cup_i A_i) = \sum_i P_X(A_i)
(Figure: the sample space partitioned into events A_1, A_2, \cdots, A_{12}.)
0 \le P_X(A) \le 1
Example: a random variable X = X(\omega) on a sample space \Omega of 16 outcomes \omega_1, \cdots, \omega_{16}, partitioned into events A, B, C, D according to the value of X(\omega):
\#A = \#\{\omega \in \Omega : X(\omega) = 0\} = 7
\#B = \#\{\omega \in \Omega : X(\omega) = 1\} = 2
\#C = \#\{\omega \in \Omega : X(\omega) = 2\} = 4
\#D = \#\{\omega \in \Omega : X(\omega) = 3\} = 3

P(X = 0) = P_X(A) = \frac{\#\{\omega \in \Omega : X(\omega) = 0\}}{\#\Omega} = \frac{7}{16}
P(X = 1) = P_X(B) = \frac{\#\{\omega \in \Omega : X(\omega) = 1\}}{\#\Omega} = \frac{2}{16}
P(X = 2) = P_X(C) = \frac{\#\{\omega \in \Omega : X(\omega) = 2\}}{\#\Omega} = \frac{4}{16}
P(X = 3) = P_X(D) = \frac{\#\{\omega \in \Omega : X(\omega) = 3\}}{\#\Omega} = \frac{3}{16}

For a discrete random variable taking values \{x_1, x_2, \cdots, x_k\}, the probability mass function is P(X = x_i) = f(x_i) and the cumulative distribution function is F(x) = P(X \le x).
For a continuous random variable, consider P(x < X \le x + \Delta x) over a small interval (x, x + \Delta x]. Letting \Delta x \to 0 gives the probability density function
f(x) = \lim_{\Delta x \to 0} \frac{P(x < X \le x + \Delta x)}{\Delta x}
The cumulative distribution function and interval probabilities are then
F(x) = P(X \le x) = \int_{-\infty}^{x} f(u)\,du, \qquad P(a < X < b) = \int_a^b f(x)\,dx
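A small R sketch (my addition) illustrating the relation F(x) = \int f(u)\,du numerically for the standard normal: integrating dnorm up to x should agree with pnorm(x).

# F(x) = P(X <= x) as an integral of the density, checked numerically
x <- 1.5
F_by_integration <- integrate(dnorm, lower = -Inf, upper = x)$value
F_by_cdf <- pnorm(x)
c(integrated = F_by_integration, pnorm = F_by_cdf)
# P(a < X < b) as an integral over (a, b)
integrate(dnorm, lower = -1, upper = 1)$value  # ~0.683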
http://www.math.wm.edu/~leemis/2008amstat.pdf
Bernoulli distribution:
P(X = x) = p^x (1-p)^{1-x} \quad (x = 0, 1)

# Bernoulli draws compared with the theoretical frequencies
# (rbern() is assumed to come from the Rlab package; rdply/rlply used below are from plyr)
p <- 0.7
trial_size <- 10000
set.seed(71)
# generate the Bernoulli samples
data <- rbern(trial_size, p)
# theoretical frequencies for x = 0 and x = 1
dens <- data.frame(y=c((1-p), p)*trial_size, x=c(0, 1))
# histogram of the samples with the theoretical bars overlaid
ggplot() +
  layer(data=data.frame(x=data), mapping=aes(x=x), geom="bar",
        stat="bin", binwidth=0.1
  ) + layer(data=dens, mapping=aes(x=x, y=y), geom="bar",
        stat="identity", width=0.05, fill="#777799", alpha=0.7)
Binomial distribution (the sum of n Bernoulli trials):
P(X = x) = \binom{n}{x} p^x (1-p)^{n-x} \quad (x = 0, 1, \cdots, n)

# sum of Bernoulli draws -> Binomial
p <- 0.7
trial_size <- 10000
sample_size <- 30
set.seed(71)
# one binomial draw = sum of sample_size Bernoulli draws
gen_binom_var <- function() {
  return(sum(rbern(sample_size, p)))
}
result <- rdply(trial_size, gen_binom_var())
# theoretical binomial frequencies
dens <- data.frame(y=dbinom(seq(sample_size), sample_size, 0.7))*trial_size
# histogram of the simulated sums with the theoretical curve overlaid
ggplot() +
  layer(data=result, mapping=aes(x=V1), geom="bar", stat="bin",
        binwidth=1, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=seq(sample_size)+.5, y=y),
        geom="line", stat="identity", position="identity", colour="red"
  ) + ggtitle("Bernoulli to Binomial.")
Poisson distribution:
P(X = x) = \frac{\lambda^x e^{-\lambda}}{x!}

trial_size <- 5000; width <- 1
# start from a binomial setting
p <- 0.7; n <- 10
np <- p*n
# let n -> infinity and p -> 0 while keeping np (= lambda) fixed
n <- 100000; p <- np/n
# one draw = sum of n Bernoulli draws
gen_binom_var <- function() {
  return(sum(rbern(n, p)))
}
result <- rdply(trial_size, gen_binom_var())
# theoretical Poisson frequencies with lambda = np
dens <- data.frame(y=dpois(seq(20), np))*trial_size
# plot
ggplot() +
  layer(data=result, mapping=aes(x=V1), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=seq(20)+.5, y=y),
        geom="line", stat="identity", position="identity",
        colour="red"
  ) + ggtitle("Bernoulli to Poisson.")
Normal distribution:
f(x) = \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left\{-\frac{1}{2}\frac{(x-\mu)^2}{\sigma^2}\right\} \quad (-\infty < x < \infty)

# the sum of many Bernoulli draws is approximately normal
n <- 10000; p <- 0.7
trial_size <- 10000
width <- 10
# one draw = sum of n Bernoulli draws
gen_binom_var <- function() {
  return(sum(rbern(n, p)))
}
result <- rdply(trial_size, gen_binom_var())
# theoretical normal density with mean np and sd sqrt(np(1-p))
dens <- data.frame(y=dnorm(seq(6800,7200), mean=n*p,
                   sd=sqrt(n*p*(1-p)))*trial_size*width)
# plot
ggplot() +
  layer(data=result, mapping=aes(x=V1), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=seq(6800,7200), y=y),
        geom="line", stat="identity", position="identity",
        colour="red") + ggtitle("Bernoulli to Normal.")
Standard normal distribution:
f(x) = \frac{1}{\sqrt{2\pi}} \exp\left\{-\frac{1}{2}x^2\right\} \quad (-\infty < x < \infty)

# standardizing the binomial sums gives the standard normal
n <- 10000; p <- 0.7
trial_size <- 30000
width <- 0.18
# one draw = sum of n Bernoulli draws
gen_binom_var <- function() {
  return(sum(rbern(n, p)))
}
result <- rdply(trial_size, gen_binom_var())
m <- mean(result$V1); sd <- sd(result$V1)
# standardize: subtract the mean and divide by the standard deviation
result$V1 <- (result$V1 - m)/sd
# standard normal density
dens <- data.frame(y=dnorm(seq(-4,4,0.05), mean=0,
                   sd=1)*trial_size*width)
# plot
ggplot() +
  layer(data=result, mapping=aes(x=V1), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=seq(-4,4,0.05), y=y),
        geom="line", stat="identity", position="identity",
        colour="red"
  ) + ggtitle("Bernoulli to Standard Normal.")
Chi-square distribution with k degrees of freedom:
f(x; k) = \frac{(1/2)^{k/2}}{\Gamma(k/2)} x^{k/2-1} e^{-x/2} \quad (0 \le x)
If X_1, \cdots, X_k are independent standard normal variables, then Z = X_1^2 + \cdots + X_k^2 follows this distribution.

# sum of squared standardized variables -> chi-square
p <- 0.7; n <- 1000
trial_size <- 100000; width <- 0.3
df <- 3
# one standardized, squared variable (built from binomial sums; repeated df = 3 times)
gen_binom_var <- function() {
  return(sum(rbern(n, p)))
}
gen_chisq_var <- function() {
  result <- rdply(trial_size, gen_binom_var())
  return(((result$V1 - mean(result$V1))/sd(result$V1))**2)
}
# add up df squared standardized variables
result <- rlply(df, gen_chisq_var(), .progress = "text")
res <- data.frame(x=result[[1]] + result[[2]] + result[[3]])
# theoretical chi-square density (df = 3)
xx <- seq(0, 20, 0.1)
dens <- data.frame(y=dchisq(x=xx, df=df)*trial_size*width)
# plot
ggplot() + layer(data=res, mapping=aes(x=x), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=xx, y=y),
        geom="line", stat="identity", position="identity",
        colour="blue" ) + ggtitle("Bernoulli to Chisquare")
Exponential distribution:
f(x; \lambda) = \begin{cases} \lambda e^{-\lambda x} & (x \ge 0) \\ 0 & (x < 0) \end{cases}

trial_size <- 7000; width <- .01
# start from a binomial setting
p <- 0.7; n <- 10; np <- p*n
# let n -> infinity and p -> 0 while keeping np (= lambda) fixed
n <- 10000; p <- np/n
# waiting time (number of Bernoulli trials, rescaled by n) until the first success
gen_exp_var <- function() {
  cnt <- 0
  while (TRUE) {
    cnt <- cnt + 1
    if (rbern(1, p)==1){
      return(cnt)  # index of the first success
    }
  }
}
data <- data.frame(x=rdply(trial_size, gen_exp_var())/n)
names(data) <- c("n", "x")
# theoretical exponential density with rate np
dens <- data.frame(y=dexp(seq(0, 1.5, 0.1), np)*trial_size*width)
ggplot() +
  layer(data=data, mapping=aes(x=x), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=seq(0, 1.5, 0.1), y=y),
        geom="line", stat="identity", position="identity", colour="red"
  ) + ggtitle("Bernoulli to Exponential.")
Gamma distribution:
f(x; \alpha, \lambda) = \frac{\lambda^\alpha}{\Gamma(\alpha)} x^{\alpha-1} \exp(-\lambda x) \quad (0 \le x < \infty)
If X_i \sim Exp(\lambda), then \sum_{i=1}^{\alpha} X_i \sim \Gamma(\alpha, \lambda).

trial_size <- 7000; width <- .035
# start from a binomial setting
p <- 0.7; n <- 10; np <- p*n
# let n -> infinity and p -> 0 while keeping np (= lambda) fixed
n <- 10000; p <- np/n; alpha <- 5
# waiting time until the first success (one exponential variable after rescaling)
get_interval <- function(){
  cnt <- 0
  while (TRUE) {
    cnt <- cnt + 1
    if (rbern(1, p)==1){ return(cnt) }
  }
}
gen_exp_var <- function() {
  data <- data.frame(x=rdply(trial_size, get_interval())/n)
  names(data) <- c("n", "x")
  return(data)
}
# sum of alpha exponential variables -> gamma
result <- rlply(alpha, gen_exp_var())
data <- data.frame(x=result[[1]]$x + result[[2]]$x + result[[3]]$x + result[[4]]$x +
                     result[[5]]$x)
# theoretical gamma density
dens <- data.frame(y=dgamma(seq(0, 3, .01), shape=alpha, rate=np)*trial_size*width)
ggplot() +
  layer(data=data, mapping=aes(x=x), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=seq(0, 3, .01), y=y),
        geom="line", stat="identity", position="identity", colour="red"
  ) + ggtitle("Bernoulli to Gamma")
Inverse gamma distribution:
f(x; \alpha, \beta) = \frac{\beta^\alpha}{\Gamma(\alpha)} x^{-(\alpha+1)} \exp\left(-\frac{\beta}{x}\right) \quad (0 \le x < \infty)
If X_i \sim Exp(\lambda) and Z = \sum_{i=1}^{\alpha} X_i \sim \Gamma(\alpha, \lambda), then 1/Z \sim IG(\alpha, \lambda).

trial_size <- 7000
width <- 0.1  # assumed value; the original width was cut off in the source
# start from a binomial setting
p <- 0.7; n <- 10; np <- p*n
# let n -> infinity and p -> 0 while keeping np (= lambda) fixed
n <- 10000; p <- np/n; alpha <- 5
# waiting time until the first success (one exponential variable after rescaling)
get_interval <- function(){
  cnt <- 0
  while (TRUE) {
    cnt <- cnt + 1
    if (rbern(1, p)==1){ return(cnt) }
  }
}
gen_exp_var <- function() {
  data <- data.frame(x=rdply(trial_size, get_interval())/n)
  names(data) <- c("n", "x")
  return(data)
}
# reciprocal of a gamma variable -> inverse gamma
result <- rlply(alpha, gen_exp_var())
data <- data.frame(x=1/(result[[1]]$x + result[[2]]$x + result[[3]]$x +
                          result[[4]]$x + result[[5]]$x))
# theoretical inverse gamma density (dinvgamma() assumed from the invgamma package, rate = 1/scale)
xx <- seq(0, 23, .01)
dens <- data.frame(y=dinvgamma(xx, shape=5, rate=1/np)*trial_size*width)
ggplot() +
  layer(data=data, mapping=aes(x=x), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=xx, y=y),
        geom="line", stat="identity", position="identity", colour="red"
  ) + ggtitle("Bernoulli to Inversegamma")
Standard uniform distribution:
f(x) = \begin{cases} 1 & (0 \le x \le 1) \\ 0 & (\text{otherwise}) \end{cases}
Binary-expansion construction from Bernoulli(1/2) digits x_1, x_2, \cdots, x_q:
Z = x_1(1/2)^1 + x_2(1/2)^2 + \cdots + x_q(1/2)^q

width <- 0.02
p <- 0.5
sample_size <- 1000
trial_size <- 100000
gen_unif_rand <- function() {
  # treat sample_size Bernoulli(1/2) draws as binary digits after the point
  # and sum them with weights (1/2)^i
  return (sum(rbern(sample_size, p) * (rep(1/2, sample_size)
                                       ** seq(sample_size))))
}
gen_rand <- function(){
  return( rdply(trial_size, gen_unif_rand()) )
}
system.time(res <- gen_rand())
ggplot() +
  layer(data=res, mapping=aes(x=V1), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + ggtitle("Bernoulli to Standard Uniform")
Uniform distribution on [a, b]:
f(x; a, b) = \begin{cases} (b-a)^{-1} & (a \le x \le b) \\ 0 & (\text{otherwise}) \end{cases}

a <- 5
b <- 8
width <- 0.05
p <- 0.5
sample_size <- 1000
trial_size <- 500000
gen_unif_rand <- function() {
  # treat sample_size Bernoulli(1/2) draws as binary digits after the point
  return (sum(rbern(sample_size, p) * (rep(1/2, sample_size)
                                       ** seq(sample_size))))
}
gen_rand <- function(){
  return( rdply(trial_size, gen_unif_rand()) )
}
system.time(res <- gen_rand())
# rescale the standard uniform draws to [a, b]
res$V1 <- res$V1 * (b-a) + a
ggplot() +
  layer(data=res, mapping=aes(x=V1), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + ggtitle("Bernoulli to Uniform") + xlim(4,9)
Beta distribution:
f(x; \alpha, \beta) = \frac{1}{B(\alpha, \beta)} x^{\alpha-1} (1-x)^{\beta-1} \quad (0 < x < 1)
If X_i \sim U(0, 1) iid (i = 1, 2, \cdots, \alpha + \beta - 1), the \alpha-th smallest of them follows Beta(\alpha, \beta).

width <- 0.03; p <- 0.5
digits_length <- 30; set_size <- 3
trial_size <- 30000
gen_unif_rand <- function() {
  # treat digits_length Bernoulli(1/2) draws as binary digits after the point
  return (sum(rbern(digits_length, p) *
                (rep(1/2, digits_length) **
                   seq(digits_length))))
}
gen_rand <- function(){
  return( rdply(set_size, gen_unif_rand())$V1 )
}
unif_dataset <- rlply(trial_size, gen_rand, .progress='text')
# take the p-th order statistic of each set of set_size uniforms -> Beta(p, q)
p <- ceiling(set_size * 0.5); q <- set_size - p + 1
get_nth_data <- function(a){ return(a[order(a)][p]) }
disp_data <- data.frame(lapply(unif_dataset, get_nth_data))
names(disp_data) <- seq(length(disp_data)); disp_data <- data.frame(t(disp_data))
names(disp_data) <- "V1"
x_range <- seq(0, 1, 0.001)
dens <- data.frame(y=dbeta(x_range, p, q)*trial_size*width)
ggplot() +
  layer(data=disp_data, mapping=aes(x=V1), geom="bar", stat="bin",
        binwidth=width, fill="#6666ee", color="gray"
  ) + layer(data=dens, mapping=aes(x=x_range, y=y),
        geom="line", stat="identity", position="identity", colour="red"
  ) + ggtitle("Bernoulli to Beta")
Expectation. For the lottery example,
E[X] = X(\text{lose})P(\text{lose}) + X(\text{win})P(\text{win}) = 0 \times 0.8 + 1{,}000{,}000 \times 0.2 = 200{,}000
In general, for a discrete random variable,
E[X] = \sum_x x\,p(x)
also written \mu.

Mean of the binomial distribution. Using \binom{n}{x} = \frac{n!}{(n-x)!\,x!},
E[X] = \sum_{x=0}^{n} x P(x) = \sum_{x=0}^{n} x \binom{n}{x} p^x (1-p)^{n-x}
     = \sum_{x=0}^{n} x \frac{n!}{(n-x)!\,x!} p^x (1-p)^{n-x}
     = \sum_{x=1}^{n} n \frac{(n-1)!}{(n-x)!\,(x-1)!} p^x (1-p)^{n-x}
     = np \sum_{x=1}^{n} \binom{n-1}{x-1} p^{x-1} (1-p)^{(n-1)-(x-1)}
     = np
since the last sum is the total probability of a Binomial(n-1, p) distribution, which equals 1.
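To back the algebra with a quick simulation, here is a short R sketch (not in the original) comparing the empirical mean of binomial draws with np.

set.seed(1)
n <- 30; p <- 0.7
x <- rbinom(100000, size = n, prob = p)      # many binomial draws
c(empirical = mean(x), theoretical = n * p)  # both close to 21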
Variance (discrete case):
Var[X] = E[(X - E[X])^2] = \sum_x (x - E[x])^2 P(x) = \sigma^2
with mean \mu = E[X].

Variance and mean (continuous case):
Var[X] = E[(X - E[X])^2] = \int_{-\infty}^{\infty} (x - E[x])^2 f(x)\,dx = \sigma^2
E[X] = \int_{-\infty}^{\infty} x f(x)\,dx = \mu

Expectation of a function of X:
E[g(X)] = \int_{-\infty}^{\infty} g(x) f(x)\,dx
The variance is the special case g(X) = (X - E[X])^2; in general E[\,\cdot\,] = \int_{-\infty}^{\infty} \cdot\; f(x)\,dx.

Moments:
g(x) = x^k: \quad E[g(X)] = E[X^k] = \int_{-\infty}^{\infty} x^k f(x)\,dx, the k-th raw moment \mu'_k.
g(x) = (x - E[x])^k: \quad E[g(X)] = E[(X - E[X])^k] = \int_{-\infty}^{\infty} (x - E[x])^k f(x)\,dx, the k-th central moment \mu_k.
E[cX] = cE[X], \quad \because\ E[cX] = \int_{-\infty}^{\infty} cx f(x)\,dx = c\int_{-\infty}^{\infty} x f(x)\,dx = cE[X]

Var[cX] = c^2 Var[X], \quad \because\ Var[cX] = \int_{-\infty}^{\infty} (cx - E[cx])^2 f(x)\,dx = \int_{-\infty}^{\infty} (cx - c\mu)^2 f(x)\,dx = \int_{-\infty}^{\infty} c^2(x - \mu)^2 f(x)\,dx = c^2\int_{-\infty}^{\infty} (x - \mu)^2 f(x)\,dx = c^2 Var[X]
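A one-line check in R (my addition) that scaling a random variable by c scales the mean by c and the variance by c^2.

set.seed(1)
x <- rnorm(100000, mean = 2, sd = 3)
k <- 5
c(mean_kx = mean(k * x), k_times_mean = k * mean(x))   # E[cX] = cE[X]
c(var_kx = var(k * x), k2_times_var = k^2 * var(x))    # Var[cX] = c^2 Var[X]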
Joint distributions. For two random variables X and Y, consider P(x < X \le x + \Delta x,\ y < Y \le y + \Delta y). Letting \Delta x, \Delta y \to 0 gives the joint density
f(x, y) = \lim_{\Delta x, \Delta y \to 0} \frac{P(x < X \le x + \Delta x,\ y < Y \le y + \Delta y)}{\Delta x\,\Delta y}

Marginal densities:
g(x) = \int_{-\infty}^{\infty} f(x, y)\,dy, \qquad h(y) = \int_{-\infty}^{\infty} f(x, y)\,dx

Expectation of a function of (X, Y):
E_{X,Y}[g(X, Y)] = \int_{-\infty}^{\infty}\int_{-\infty}^{\infty} g(x, y) f(x, y)\,dx\,dy
Example: g(x, y) = x^{0.8} y^{0.8} with (x, y) \sim N((4, 4), S), S = \begin{pmatrix} 1 & 0.5 \\ 0.4 & 1 \end{pmatrix}, gives E_{X,Y}[g(X, Y)] = 8.02.

Covariance: taking g(X, Y) = (X - \mu_X)(Y - \mu_Y),
Cov[X, Y] = E[(X - \mu_X)(Y - \mu_Y)]
(Figure: scatter plots of (x, y) \sim N((4, 4), S) split into quadrants at \mu_X and \mu_Y, for
S_1 = \begin{pmatrix} 1 & 0.8 \\ 0.8 & 1 \end{pmatrix},\ S_2 = \begin{pmatrix} 1 & -0.8 \\ -0.8 & 1 \end{pmatrix},\ S_3 = \begin{pmatrix} 1 & 0 \\ 0 & 1 \end{pmatrix},\ S_4 = \begin{pmatrix} 1 & 0.999 \\ 0.999 & 1 \end{pmatrix}.)

Independence: if the joint density factorizes as f(x, y) = g(x)h(y), then X and Y are independent, and in that case Cov[X, Y] = 0.
For n random variables (x_1, x_2, \cdots, x_n), the marginal density of x_1 is
f(x_1) = \int \cdots \int f(x_1, \cdots, x_n)\,dx_2 \cdots dx_n
and x_1, \cdots, x_n are independent if
f(x_1, \cdots, x_n) = f(x_1) \cdots f(x_n)

If x_1, \cdots, x_n are independent and g_1(x_1), \cdots, g_n(x_n) are functions of x_1, \cdots, x_n, then
E\left[\prod_{i=1}^{n} g_i(x_i)\right] = \prod_{i=1}^{n} E[g_i(x_i)]
Indeed,
E\left[\prod_{i=1}^{n} g_i(x_i)\right] = \int_{-\infty}^{\infty} \cdots \int_{-\infty}^{\infty} g_1(x_1)\cdots g_n(x_n) f(x_1, \cdots, x_n)\,dx_1 \cdots dx_n
= \int_{-\infty}^{\infty} g_1(x_1) f(x_1)\,dx_1 \cdots \int_{-\infty}^{\infty} g_n(x_n) f(x_n)\,dx_n
= \prod_{i=1}^{n} E[g_i(x_i)]
using f(x_1, \cdots, x_n) = f(x_1) \cdots f(x_n).
Let x_1, \cdots, x_n be independent with E[x_i] = \mu_i and Var[x_i] = \sigma_i^2 (i = 1, 2, \cdots, n). For constants c = (c_1, \cdots, c_n), the linear combination c_1 x_1 + \cdots + c_n x_n has mean c_1\mu_1 + \cdots + c_n\mu_n and variance c_1^2\sigma_1^2 + \cdots + c_n^2\sigma_n^2.

Mean:
E[c_1 x_1 + \cdots + c_n x_n]
= \int_{-\infty}^{\infty} \cdots \int_{-\infty}^{\infty} (c_1 x_1 + \cdots + c_n x_n) f(x_1, \cdots, x_n)\,dx_1 \cdots dx_n
= c_1 \int_{-\infty}^{\infty} \cdots \int_{-\infty}^{\infty} x_1 f(x_1, \cdots, x_n)\,dx_1 \cdots dx_n + \cdots + c_n \int_{-\infty}^{\infty} \cdots \int_{-\infty}^{\infty} x_n f(x_1, \cdots, x_n)\,dx_1 \cdots dx_n
= c_1 \int_{-\infty}^{\infty} x_1 f(x_1)\,dx_1 + \cdots + c_n \int_{-\infty}^{\infty} x_n f(x_n)\,dx_n \qquad (\text{using } f(x_1, \cdots, x_n) = f(x_1)\cdots f(x_n))
= c_1\mu_1 + \cdots + c_n\mu_n

Variance:
Var[c_1 x_1 + \cdots + c_n x_n]
= E[\{(c_1 x_1 + \cdots + c_n x_n) - E[c_1 x_1 + \cdots + c_n x_n]\}^2]
= E[\{c_1(x_1 - \mu_1) + \cdots + c_n(x_n - \mu_n)\}^2]
= E\left[\sum_{i=1}^{n} c_i^2 (x_i - \mu_i)^2 + \sum_{i \ne j} c_i c_j (x_i - \mu_i)(x_j - \mu_j)\right]
= \sum_{i=1}^{n} c_i^2 E[(x_i - \mu_i)^2] + \sum_{i \ne j} c_i c_j E[(x_i - \mu_i)(x_j - \mu_j)]
= c_1^2\sigma_1^2 + \cdots + c_n^2\sigma_n^2
since E[(x_i - \mu_i)^2] = \sigma_i^2 and, by independence, E[(x_i - \mu_i)(x_j - \mu_j)] = E[x_i - \mu_i]E[x_j - \mu_j] = 0.
Now let x_1, \cdots, x_n be independent and identically distributed, each x_i with mean \mu and variance \sigma^2, i.e. x_i \sim (\mu, \sigma^2). For the sum T = x_1 + \cdots + x_n:
E[T] = E[x_1 + \cdots + x_n] = E[x_1] + \cdots + E[x_n] = n\mu
Var[T] = Var[x_1 + \cdots + x_n] = Var[x_1] + \cdots + Var[x_n] = n\sigma^2
(the case \sigma_1^2 = \cdots = \sigma_n^2 = \sigma^2 and c_1 = \cdots = c_n = 1 of Var[c_1 x_1 + \cdots + c_n x_n] = c_1^2\sigma_1^2 + \cdots + c_n^2\sigma_n^2).

For the sample mean
\bar{x} = \frac{1}{n}\sum_{i=1}^{n} x_i = \frac{1}{n}T
we get
E[\bar{x}] = \frac{1}{n}E[T] = \frac{1}{n} \cdot n\mu = \mu
Var[\bar{x}] = Var\left[\frac{1}{n}T\right] = \frac{1}{n^2}Var[T] = \frac{\sigma^2}{n}

Example: for a distribution with \mu = 0.5 and \sigma^2 = 0.0833 and a sample of size n = 500,
E[\bar{x}] = 0.5, \qquad Var[\bar{x}] = \frac{\sigma^2}{n} = \frac{0.0833}{500} = 0.000166
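A short R sketch (my addition) that simulates the example, assuming the slide's values (mean 0.5, variance 0.0833) correspond to a Uniform(0, 1) variable: the variance of sample means of n = 500 draws should be close to 0.0833/500.

# assumption: mu = 0.5 and sigma^2 = 1/12 = 0.0833 match Uniform(0, 1)
set.seed(1)
n <- 500
xbar <- replicate(20000, mean(runif(n)))
c(mean_of_xbar = mean(xbar), var_of_xbar = var(xbar), theory = (1/12) / n)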
Chebyshev's inequality. For any random variable x with mean \mu and variance \sigma^2, and any k > 0,
P(|x - \mu| > k\sigma) \le \frac{1}{k^2}
whatever the underlying distribution (only \mu and \sigma^2 are needed):
k = 1 \Rightarrow P(|x - \mu| > \sigma) \le 1
k = 2 \Rightarrow P(|x - \mu| > 2\sigma) \le 1/4
k = 3 \Rightarrow P(|x - \mu| > 3\sigma) \le 1/9

Derivation. Split the real line into I_1 = (-\infty, \mu - k\sigma),\ I_2 = [\mu - k\sigma, \mu + k\sigma],\ I_3 = (\mu + k\sigma, \infty):
\sigma^2 = \int_{-\infty}^{\infty} (x - \mu)^2 f(x)\,dx
        = \int_{I_1} (x - \mu)^2 f(x)\,dx + \int_{I_2} (x - \mu)^2 f(x)\,dx + \int_{I_3} (x - \mu)^2 f(x)\,dx
\sigma^2 \ge \int_{I_1} (x - \mu)^2 f(x)\,dx + \int_{I_3} (x - \mu)^2 f(x)\,dx
        \ge \int_{I_1} k^2\sigma^2 f(x)\,dx + \int_{I_3} k^2\sigma^2 f(x)\,dx
        = k^2\sigma^2 [P(x \in I_1) + P(x \in I_3)]
        = k^2\sigma^2 P(|x - \mu| > k\sigma)
\Rightarrow P(|x - \mu| > k\sigma) \le \frac{1}{k^2}
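An R sketch (my addition) checking Chebyshev's bound empirically for a deliberately non-normal distribution: the observed tail probabilities stay below 1/k^2.

set.seed(1)
x <- rexp(1e6, rate = 1)          # exponential: mu = 1, sigma = 1
mu <- 1; sigma <- 1
for (k in 1:3) {
  p_emp <- mean(abs(x - mu) > k * sigma)
  cat(sprintf("k = %d: empirical %.4f <= bound %.4f\n", k, p_emp, 1 / k^2))
}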
Law of large numbers. If x_1, \cdots, x_n are iid with mean \mu and variance \sigma^2, then for any \varepsilon > 0,
\lim_{n \to \infty} P\{|\bar{x}_n - \mu| \ge \varepsilon\} = 0, \qquad \bar{x}_n = \frac{1}{n}\sum_{i=1}^{n} x_i
i.e. the sample mean \bar{x}_n converges to \mu in probability: \bar{x}_n \to \mu in P.

Proof: \bar{x}_n has mean \mu and variance \sigma_{\bar{x}}^2 = \sigma^2/n, so for any \varepsilon > 0 Chebyshev's inequality (with k = \varepsilon\sqrt{n}/\sigma) gives
P(|\bar{x}_n - \mu| > \varepsilon) = P\left(|\bar{x}_n - \mu| > \frac{\varepsilon\sqrt{n}}{\sigma} \cdot \frac{\sigma}{\sqrt{n}}\right) \le \frac{\sigma^2}{\varepsilon^2 n} \longrightarrow 0 \quad (n \to \infty)
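A compact R sketch (my addition) of the law of large numbers in action: the running mean of Bernoulli(0.7) draws settles around 0.7 as n grows.

set.seed(71)
x <- rbinom(100000, size = 1, prob = 0.7)   # Bernoulli(0.7) draws
running_mean <- cumsum(x) / seq_along(x)
running_mean[c(10, 100, 1000, 100000)]      # drifts toward 0.7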
Normal density and its standardized form:
f(x) = \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{(x - \mu)^2}{2\sigma^2}\right), \qquad f(x) = \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{x^2}{2}\right) \qquad (-\infty < x < \infty)

Why the normalizing constants: start from f(y) = \exp(-y^2). Substituting z = \sqrt{2}\,y gives f(z) = \exp\left(-\frac{1}{2}z^2\right), and since
\int_{-\infty}^{\infty} e^{-y^2}\,dy = \sqrt{\pi}, \qquad dz = \sqrt{2}\,dy
we get
\int_{-\infty}^{\infty} \exp\left(-\frac{z^2}{2}\right)dz = \sqrt{2\pi}
\Rightarrow \int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{z^2}{2}\right)dz = 1
Substituting z = \frac{x - \mu}{\sigma} (so \frac{dz}{dx} = \frac{1}{\sigma}) then gives
\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{(x - \mu)^2}{2\sigma^2}\right)dx = 1
(the factor 1/\sigma from the change of variables is what turns \sqrt{2\pi} into \sqrt{2\pi\sigma^2}).

Central limit theorem (statement): for data D = (x_1, \cdots, x_n) iid with mean \mu and variance \sigma^2,
\frac{\bar{x} - \mu}{\sigma/\sqrt{n}} \to N(0, 1) \quad (n \to \infty)
Example: for an exponential distribution with \lambda = 0.1, \mu = \frac{1}{\lambda} = 10 and \sigma^2 = \frac{1}{\lambda^2} = 100; with n = 10000,
\frac{\sigma}{\sqrt{n}} = \sqrt{\frac{1}{\lambda^2 n}} = \sqrt{\frac{1}{0.01 \times 10000}} = \sqrt{\frac{1}{100}} = \frac{1}{10}
Moment generating function. Take g(x) = e^{xt}; then
E[e^{xt}] = \int_{-\infty}^{\infty} e^{xt} f(x)\,dx
and the moment generating function is M_x(t) = E[e^{xt}]. If M_x(t) = M_y(t) in a neighborhood of t = 0, then x and y have the same distribution.

Expanding g(x) = e^{xt} as
e^{xt} = 1 + xt + \frac{t^2}{2!}x^2 + \cdots + \frac{t^k}{k!}x^k + \cdots
gives
M_x(t) = E[e^{xt}] = E\left[1 + xt + \frac{t^2}{2!}x^2 + \cdots + \frac{t^k}{k!}x^k + \cdots\right]
       = 1 + tE[x] + \frac{t^2}{2!}E[x^2] + \cdots + \frac{t^k}{k!}E[x^k] + \cdots
       = 1 + t\mu'_1 + \frac{t^2}{2!}\mu'_2 + \cdots + \frac{t^k}{k!}\mu'_k + \cdots
Differentiating M_x(t) k times,
\frac{d^k}{dt^k} M_x(t) = E[x^k e^{xt}]
and evaluating at t = 0,
\frac{d^k}{dt^k} M_x(0) = E[x^k] = \mu'_k
MGF of the normal distribution. For x \sim N(\mu, \sigma^2),
M_x(t) = E[e^{xt}] = \int_{-\infty}^{\infty} e^{xt} \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{1}{2}\frac{(x - \mu)^2}{\sigma^2}\right)dx
Substituting z = \frac{x - \mu}{\sigma}, i.e. x = \mu + \sigma z and dx = \sigma\,dz:
M_x(t) = \int_{-\infty}^{\infty} e^{(\mu + \sigma z)t} \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{1}{2}z^2\right)\sigma\,dz
       = e^{\mu t}\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(\sigma t z - \frac{1}{2}z^2\right)dz
       = e^{\mu t}\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{1}{2}\left[z^2 - 2\sigma t z + \sigma^2 t^2 - \sigma^2 t^2\right]\right)dz
       = e^{\mu t}\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}}\, e^{\frac{\sigma^2 t^2}{2}} \exp\left(-\frac{1}{2}(z - \sigma t)^2\right)dz
       = e^{\mu t}\, e^{\frac{\sigma^2 t^2}{2}} \int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{1}{2}(z - \sigma t)^2\right)dz
Substituting z - \sigma t = w, dz = dw:
M_x(t) = e^{\mu t}\, e^{\frac{\sigma^2 t^2}{2}} \int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{w^2}{2}\right)dw = e^{\mu t + \frac{\sigma^2 t^2}{2}}
Using the product rule (f \cdot g)' = f' \cdot g + f \cdot g' and the chain rule (f \circ g)'(x) = f'(g(x))\,g'(x):
M'_x(t) = (\mu + \sigma^2 t)\, e^{\mu t + \frac{\sigma^2 t^2}{2}}
M''_x(t) = (\mu + \sigma^2 t)^2 \left(e^{\mu t + \frac{\sigma^2 t^2}{2}}\right) + \sigma^2\left(e^{\mu t + \frac{\sigma^2 t^2}{2}}\right) = \left(e^{\mu t + \frac{\sigma^2 t^2}{2}}\right)\{(\mu + \sigma^2 t)^2 + \sigma^2\}

Since
Var[x] = E[(x - E[x])^2] = E[x^2 - 2E[x]x + E[x]^2] = E[x^2] - 2E[x]^2 + E[x]^2 = E[x^2] - E[x]^2
and, evaluating the derivatives at t = 0,
E[x] = M'_x(0) = (\mu + \sigma^2 \cdot 0)\, e^{\mu \cdot 0 + \frac{\sigma^2 \cdot 0^2}{2}} = \mu
E[x^2] = M''_x(0) = \left(e^{\mu \cdot 0 + \frac{\sigma^2 \cdot 0^2}{2}}\right)\{(\mu + \sigma^2 \cdot 0)^2 + \sigma^2\} = \mu^2 + \sigma^2
we get
Var[x] = E[x^2] - (E[x])^2 = (\mu^2 + \sigma^2) - \mu^2 = \sigma^2
Central limit theorem. For D = (x_1, \cdots, x_n) iid with mean \mu and variance \sigma^2,
\frac{\bar{x} - \mu}{\sigma/\sqrt{n}} \to N(0, 1) \quad (n \to \infty)
Equivalently, with T = x_1 + \cdots + x_n, the quantity
T' = \frac{T - n\mu}{\sqrt{n}} = \frac{\bar{x} - \mu}{1/\sqrt{n}}
converges to N(0, \sigma^2). Since equal MGFs in a neighborhood of t = 0 imply equal distributions, it suffices to show that the MGF of T' converges to that of N(0, \sigma^2).

MGF of each x_i, and of the centered variables:
M_{x_i}(t) = 1 + \mu'_1 t + \mu'_2\frac{t^2}{2!} + \mu'_3\frac{t^3}{3!} + \cdots
M_{x_i - \mu}(t) = 1 + \mu_1 t + \mu_2\frac{t^2}{2!} + \mu_3\frac{t^3}{3!} + \cdots = 1 + 0 + \sigma^2\frac{t^2}{2!} + \mu_3\frac{t^3}{3!} + \cdots
For the rescaled variables \frac{x_i - \mu}{\sqrt{n}},
M_{\frac{x_i - \mu}{\sqrt{n}}}(t) = E\left[e^{\frac{x_i - \mu}{\sqrt{n}}t}\right] = 1 + \sigma^2\frac{t^2}{2!\,n} + \mu_3\frac{t^3}{3!\,n^{3/2}} + \cdots + \mu_k\frac{t^k}{k!\,n^{k/2}} + \cdots = 1 + \frac{\sigma^2 t^2}{2n} + \frac{\epsilon_n}{2n} = 1 + \frac{\sigma^2 t^2 + \epsilon_n}{2n}
where \epsilon_n collects the higher-order terms and \epsilon_n \to 0 as n \to \infty.

Since
T' = \frac{x_1 - \mu}{\sqrt{n}} + \frac{x_2 - \mu}{\sqrt{n}} + \cdots + \frac{x_n - \mu}{\sqrt{n}} = \sum_{i=1}^{n} \frac{x_i - \mu}{\sqrt{n}}
and the x_i are independent,
M_{T'}(t) = M_{\sum_{i=1}^{n}\left(\frac{x_i - \mu}{\sqrt{n}}\right)}(t) = E\left[e^{\sum_{i=1}^{n}\left(\frac{x_i - \mu}{\sqrt{n}}\right)t}\right] = \prod_{i=1}^{n} E\left[e^{\left(\frac{x_i - \mu}{\sqrt{n}}\right)t}\right] = \left(1 + \frac{\frac{1}{n}\left(\sigma^2 t^2 + \epsilon_n\right)}{2}\right)^n
Using the definition e^r \equiv \lim_{n \to \infty}\left(1 + \frac{r}{n}\right)^n,
\lim_{n \to \infty} M_{T'}(t) = \lim_{n \to \infty}\left(1 + \frac{\sigma^2 t^2 + \epsilon_n}{2n}\right)^n = e^{\frac{\sigma^2 t^2}{2}} \qquad \left(\lim_{n \to \infty}\epsilon_n = 0\right)
which is the MGF of N(0, \sigma^2). Hence T' = \frac{T - n\mu}{\sqrt{n}} \to N(0, \sigma^2), which is the central limit theorem.
import numpy as np
import scipy.stats as st

# sample means of a mixture of different distributions (CLT demonstration)
n = 100000
sample_size = 1000
rvs_list = []
m_list = []
for i in range(n):
    unif_rvs = st.uniform.rvs(4.5, size=sample_size)      # uniform on [4.5, 5.5], mean 5
    beta_rvs = st.beta.rvs(a=3, b=3, size=sample_size)     # Beta(3, 3), mean 0.5
    gamma_rvs = st.gamma.rvs(a=3, size=sample_size)        # Gamma(3), mean 3
    chi2_rvs = st.chi2.rvs(df=5, size=sample_size)         # chi-square, 5 degrees of freedom
    exp_rvs = st.expon.rvs(loc=0, size=sample_size)        # exponential, mean 1
    rvs = np.array([unif_rvs, beta_rvs, gamma_rvs, chi2_rvs, exp_rvs]).flatten()
    m_list.append(np.mean(rvs))
    rvs_list.append(rvs)
# parallel version: each sample gets its own randomly drawn distribution parameters
import time
import numpy as np
import numpy.random as rd
import scipy.stats as st
from datetime import datetime as dt
from multiprocessing import Process, Manager

n = 10000
sample_size = 1000
rvs_list = []
m_list = []
m_unif = st.uniform.rvs(4, 2, size=sample_size)
m_beta_a = st.uniform.rvs(4, 2, size=sample_size)
m_beta_b = st.uniform.rvs(4, 2, size=sample_size)
m_gamma = rd.randint(2, 5, size=sample_size)
m_chi2_df = rd.randint(3, 6, size=sample_size)
m_exp = st.uniform.rvs(4, 2, size=sample_size)

def gen_random_state():
    # per-call random state so that child processes do not share a seed
    return int(dt.now().timestamp() * 10**6) - 1492914610000000 + rd.randint(0, 1000000)

def create_rvs(n):
    print("[START]")
    for _ in range(n):
        unif_rvs = [st.uniform.rvs(m, size=1, random_state=gen_random_state())
                    for m in m_unif]
        beta_rvs = [st.beta.rvs(a=a, b=b, size=1, random_state=gen_random_state())
                    for a, b in zip(m_beta_a, m_beta_b)]
        gamma_rvs = [st.gamma.rvs(a=a, size=1, random_state=gen_random_state())
                     for a in m_gamma]
        chi2_rvs = [st.chi2.rvs(df=d, size=1, random_state=gen_random_state())
                    for d in m_chi2_df]
        exp_rvs = [st.expon.rvs(loc=l, size=1, random_state=gen_random_state())
                   for l in m_exp]
        rvs = np.array([unif_rvs, beta_rvs, gamma_rvs, chi2_rvs, exp_rvs]).flatten()
        l_mean.append(np.mean(rvs))
        l_rvs.append(rvs)
    print("[END]")

n_jobs = 20
n_each = int(n/n_jobs)
jobs = [Process(target=create_rvs, args=(n_each,)) for _ in range(n_jobs)]
manager = Manager()
# the first n_jobs placeholder entries are dropped after the run
l_rvs = manager.list(range(len(jobs)))
l_mean = manager.list(range(len(jobs)))
start_time = time.time()
for j in jobs:
    j.start()
    time.sleep(0.2)
for j in jobs:
    j.join()
finish_time = time.time()
print(finish_time - start_time)
m_list = l_mean[n_jobs:]
rvs_list = np.array(l_rvs[n_jobs:])
print(rvs_list.shape)
Estimation. From data D = (x_1, \cdots, x_n) we form a point estimate \theta_0 = \hat\theta(X_1, \cdots, X_n), or an interval estimate \hat\theta_{lower}(X_1, \cdots, X_n) \le \theta_0 \le \hat\theta_{upper}(X_1, \cdots, X_n).

The quality of an estimator \hat\theta(X) can be measured by the mean squared error E[(\hat\theta(X) - \theta)^2]:
E[(\hat\theta(X) - \theta)^2]
= E[\{(E[\hat\theta(X)] - \theta) + (\hat\theta(X) - E[\hat\theta(X)])\}^2]
= E[(E[\hat\theta(X)] - \theta)^2 + 2(E[\hat\theta(X)] - \theta)(\hat\theta(X) - E[\hat\theta(X)]) + (\hat\theta(X) - E[\hat\theta(X)])^2]
= (E[\hat\theta(X)] - \theta)^2 + Var[\hat\theta(X)]
i.e. squared bias plus variance. If the estimator is unbiased, E[\hat\theta(X)] = \theta, then
E[(\hat\theta(X) - \theta)^2] = Var[\hat\theta(X)]
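A small R sketch (my addition) illustrating the decomposition numerically: for the (biased) 1/n variance estimator on normal samples, the simulated MSE is close to squared bias plus variance.

set.seed(1)
n <- 10; true_var <- 4
est <- replicate(50000, {
  x <- rnorm(n, mean = 0, sd = sqrt(true_var))
  sum((x - mean(x))^2) / n               # biased variance estimator
})
mse  <- mean((est - true_var)^2)
bias <- mean(est) - true_var
c(mse = mse, bias2_plus_var = bias^2 + var(est))  # nearly equal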
Unbiasedness: E[\bar{x}] = \frac{1}{n}E[T] = \frac{1}{n}\cdot n\mu = \mu, so the sample mean \bar{x} is an unbiased estimator of \mu; the unbiased estimator of the variance is
s^2 = \frac{1}{n-1}\sum_{i=1}^{n}(x_i - \bar{x})^2

Consistency: since \lim_{n \to \infty} P\{|\bar{x}_n - \mu| \ge \varepsilon\} = 0 (\bar{x}_n \to \mu in P), the sample mean is a consistent estimator of \mu; in general an estimator \hat\theta_n(X) is consistent if \hat\theta_n(X) \to \theta in P as n \to \infty.

Efficiency: among estimators \hat\theta(X), the one with the smaller variance Var[\hat\theta(X)] is the more efficient.
Likelihood. For data D = (x_1, \cdots, x_n) where each x_i has density f(x_i), the joint density is \prod_{i=1}^{n} f(x_i). Viewing it as a function of the parameter \theta with the observed x_i fixed gives the likelihood
\ell(\theta|x_1, x_2, \cdots, x_n) = \prod_{i=1}^{n} f(x_i|\theta)
Example: for normally distributed observations x_1, x_2, \cdots, x_{10},
f(x_1, x_2, \cdots, x_{10}|\mu, \sigma^2) = \prod_{i=1}^{10} \frac{1}{\sqrt{2\pi\sigma^2}}\exp\left(-\frac{1}{2}\frac{(x_i - \mu)^2}{\sigma^2}\right)
\ell(\mu, \sigma^2|x_1, x_2, \cdots, x_{10}) = \prod_{i=1}^{10} \frac{1}{\sqrt{2\pi\sigma^2}}\exp\left(-\frac{1}{2}\frac{(x_i - \mu)^2}{\sigma^2}\right)

The maximum likelihood estimate is
\theta^* = \arg\max_\theta \ell(\theta|x_1, x_2, \cdots, x_n)
In practice we work with the log-likelihood \log\ell(\theta|x_1, \cdots, x_n) \equiv L(\theta|x_1, \cdots, x_n) rather than \ell itself. For the normal model with parameters \mu, \sigma^2,
L(\mu, \sigma^2|x_1, x_2, \cdots, x_n) = -\frac{n}{2}\log(2\pi) - \frac{n}{2}\log\sigma^2 - \frac{1}{2\sigma^2}\sum_{i=1}^{n}(x_i - \mu)^2
Setting the partial derivatives of L to zero:
\frac{\partial L}{\partial \mu} = \frac{1}{\sigma^2}\sum_{i=1}^{n}(x_i - \mu) = 0
\Rightarrow \sum_{i=1}^{n} x_i = n\mu
\Rightarrow \mu^* = \frac{1}{n}\sum_{i=1}^{n} x_i

\frac{\partial L}{\partial \sigma^2} = -\frac{n}{2}\frac{1}{\sigma^2} + \frac{1}{2(\sigma^2)^2}\sum_{i=1}^{n}(x_i - \mu)^2 = 0
\Rightarrow \frac{1}{2(\sigma^2)^2}\sum_{i=1}^{n}(x_i - \mu)^2 = \frac{n}{2\sigma^2}
\Rightarrow \sigma^{2*} = \frac{1}{n}\sum_{i=1}^{n}(x_i - \mu)^2

So from data D = (x_1, \cdots, x_n) the maximum likelihood estimates of \mu and \sigma^2 are \mu^* and \sigma^{2*}.
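A short R sketch (my addition) confirming the closed-form result: maximizing the normal log-likelihood numerically with optim() lands on the same \mu^* and \sigma^{2*} as the formulas above (the log-variance parameterization is only there to keep the variance positive during the search).

set.seed(1)
x <- rnorm(200, mean = 5, sd = 2)
# closed-form MLEs
mu_star  <- mean(x)
var_star <- mean((x - mu_star)^2)        # note: 1/n, not 1/(n-1)
# numerical maximization of the log-likelihood, parameterized as (mu, log sigma^2)
negloglik <- function(par) -sum(dnorm(x, mean = par[1], sd = exp(par[2] / 2), log = TRUE))
fit <- optim(c(0, 0), negloglik)
c(mu_star = mu_star, var_star = var_star,
  optim_mu = fit$par[1], optim_var = exp(fit$par[2]))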
t distribution. If u \sim N(0, 1) and v \sim \chi^2(m) are independent, then
t = \frac{u}{\sqrt{v/m}}
follows a t distribution with m degrees of freedom, whose density is
f(t) = \frac{\Gamma\left(\frac{m+1}{2}\right)}{\sqrt{m\pi}\,\Gamma\left(\frac{m}{2}\right)}\left(\frac{t^2}{m} + 1\right)^{-\frac{m+1}{2}}
Sketch: for u \sim N(0, 1) and v \sim \chi^2(m) (with v > 0 and -\infty < u < +\infty), the joint density is
f(u, v) = \frac{1}{\sqrt{2\pi}}\exp\left(-\frac{u^2}{2}\right)\frac{(1/2)^{m/2}}{\Gamma(m/2)}v^{m/2-1}e^{-v/2}
Changing variables to t = \frac{u}{\sqrt{v/m}}, x = v and integrating out x yields f(t) above. Here \Gamma is the gamma function,
\Gamma(z) = \int_{0}^{\infty} t^{z-1}e^{-t}\,dt
Estimating \mu. For D = (x_1, \cdots, x_n) with x_i \sim N(\mu, \sigma^2):
\bar{x} \sim N(\mu, \sigma^2/n), \qquad \frac{1}{\sigma^2}\sum_{i=1}^{n}(x_i - \bar{x})^2 \sim \chi^2_{n-1}
and these two quantities are independent. So with
u = \frac{\bar{x} - \mu}{\sigma/\sqrt{n}} \sim N(0, 1), \qquad v = \frac{1}{\sigma^2}\sum_{i=1}^{n}(x_i - \bar{x})^2 \sim \chi^2_{n-1}
we have
t = \frac{u}{\sqrt{v/(n-1)}} = \frac{\bar{x} - \mu}{\sigma/\sqrt{n}}\cdot\left[\frac{1}{\sigma^2}\frac{1}{(n-1)}\sum_{i=1}^{n}(x_i - \bar{x})^2\right]^{-1/2} = \frac{\bar{x} - \mu}{1/\sqrt{n}}\cdot\frac{1}{\sqrt{s^2}} = \frac{\bar{x} - \mu}{s/\sqrt{n}} \sim t_{n-1}
where
s^2 = \frac{1}{n-1}\sum_{i=1}^{n}(x_i - \bar{x})^2
is the unbiased sample variance. The unknown \sigma cancels, so the statistic depends on the data only through \bar{x} and s^2.
Since \frac{\bar{x} - \mu}{s/\sqrt{n}} \sim t_{n-1},
P\left(-t_{n-1;\alpha/2} \le \frac{\bar{x} - \mu}{s/\sqrt{n}} \le t_{n-1;\alpha/2}\right) = 1 - \alpha
where \pm t_{n-1;\alpha/2} are the points cutting off probability \alpha/2 in each tail, leaving probability 1 - \alpha in between. Rearranging,
P\left(\bar{x} - t_{n-1;\alpha/2}\frac{s}{\sqrt{n}} \le \mu \le \bar{x} + t_{n-1;\alpha/2}\frac{s}{\sqrt{n}}\right) = 1 - \alpha
so the interval
\left[\bar{x} - t_{n-1;\alpha/2}\frac{s}{\sqrt{n}},\ \bar{x} + t_{n-1;\alpha/2}\frac{s}{\sqrt{n}}\right]
is a 100(1 - \alpha)% confidence interval for \mu.
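An R sketch (my addition) computing the interval by hand and checking it against t.test().

set.seed(1)
x <- rnorm(20, mean = 10, sd = 3)
n <- length(x); alpha <- 0.05
xbar <- mean(x); s <- sd(x)               # sd() uses the 1/(n-1) definition, matching s^2
t_crit <- qt(1 - alpha / 2, df = n - 1)
ci_manual <- c(xbar - t_crit * s / sqrt(n), xbar + t_crit * s / sqrt(n))
ci_ttest  <- t.test(x, conf.level = 1 - alpha)$conf.int
rbind(manual = ci_manual, t.test = as.numeric(ci_ttest))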
Hypothesis testing. With \sigma = 1, test
H_0: \mu_0 = 0 \quad \text{vs.} \quad H_1: \mu \ne \mu_0
The standard error of \bar{x} is \sigma/\sqrt{n} = \sigma/\sqrt{10} \approx \sigma/3.16, and the rejection region puts probability \alpha/2 in each tail.
The power of the test is evaluated against specific alternatives such as H_1: \mu = 1, H_1: \mu = 0.5, or H_1: \mu = 3, each compared with H_0: \mu_0 = 0; the further the true \mu lies from \mu_0, the easier the difference is to detect.
Effect size:
\delta = \frac{\mu - \mu_0}{\sigma}
Correlation coefficient:
r = \frac{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})(y_i - \bar{y})}{\sqrt{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})^2}\,\sqrt{\frac{1}{n}\sum_{i=1}^{n}(y_i - \bar{y})^2}}
Its numerator is the sample covariance
\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})(y_i - \bar{y})
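A short R sketch (my addition) computing r directly from the formula and comparing with cor(); the 1/n factors cancel between numerator and denominator, so the built-in gives the same value.

set.seed(1)
x <- rnorm(100)
y <- 0.6 * x + rnorm(100, sd = 0.8)
n <- length(x)
num <- sum((x - mean(x)) * (y - mean(y))) / n
den <- sqrt(sum((x - mean(x))^2) / n) * sqrt(sum((y - mean(y))^2) / n)
c(manual_r = num / den, cor_r = cor(x, y))   # identical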
数学カフェ Probability, Statistics and Machine Learning session: "A Crash Course in Probability and Statistics"
数学カフェ 確率・統計・機械学習回 「速習 確率・統計」

  • 1.
  • 3.
  • 4.
  • 5.
  • 6.
  • 7.
  • 8.
  • 11. D = {x1, x2, · · · , xn} ¯x = 1 n nX i=1 xi 2 = 1 n nX i=1 (xi ¯x)2 = v u u t 1 n nX i=1 (xi ¯x)2
  • 12.
  • 13.
  • 14.
  • 15.
  • 16.
  • 17.
  • 18.
  • 20.
  • 21.
  • 22.
  • 24. p
  • 26.
  • 27.
  • 28.
  • 29.
  • 30.
  • 31.
  • 32.
  • 33.
  • 35. ! 2 ⌦ = {!1, !2, · · · , !m} ⌦ = { , } ! 2 { , } !(1) = !(2) = !(n) =
  • 36. ⌦ = {1, 2, 3, 4, 5, 6} !(1) = !(2) = !(n) = ⌦ = {!1, !2, · · · , !49870000} !(1) = !43890298 = 171cm !(2) = !29184638 = 168cm !(n) = !51398579 = 174cm
  • 37. !(1) = !(2) = !(n) =!(3) = !1 !2 !3 !4 !5 !6 !7 !8 !9 !10 = {!1, !2, !3, · · · , !10} ! 2 ⌦ = {ID1, ID2, ID3, · · · , ID10}
  • 39. X = X(!) ⌦ ! ! X(!1) = 0 X(!2) = 0 X(!3) = 0 X(!4) = 0 X(!5) = 0 X(!6) = 0 X(!7) = 0 X(!8) = 0 X(!9) = 0 X(!10) = 100
  • 40. ! {! 2 ⌦ : X(!) 2 A} {X 2 A} X(!) X
  • 41. {! 2 ⌦ : X(!) 2 A} !1 !2 !3 !4 !5 !6 !7 !8 !9 !10 A X(!) = 100Ac X(!) = 0 !5 or !9
  • 42. PX (A) = P(X 2 A) = P({! 2 ⌦ : X(!) 2 A}) ⌦ !5, !9 !5, !9 PX (A) = #({! 2 ⌦ : X(!) 2 A}) #( ) = #(!5, !9) #( ) = 2 10 = 0.2
  • 43. PX(⌦) = 1 A1, A2, · · · PX ([iAi) = X i PX (Ai) A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 0  PX(A)  1
  • 44.
  • 45. X = X(!) ⌦ A A !1 !2 !3 !4 !5 !6 !7 !8 !11 !10 !9 !12 !13 !14 !15 !16 B C D X(!) = 0 X(!) = 0 #A = #{! 2 ⌦ : X(!) = 0} = 7 #B = #{! 2 ⌦ : X(!) = 1} = 2 #C = #{! 2 ⌦ : X(!) = 2} = 4 #D = #{! 2 ⌦ : X(!) = 3} = 3
  • 46. ⌦ A A !1 !2 !3 !4 !5 !6 !7 !8 !11 !10 !9 !12 !13 !14 !15 !16 B C DX(!) = 0 P(X = 0) = PX(A) = #{! 2 ⌦ : X(!) = 0} #⌦ = 7 16 P(X = 1) = PX (B) = #{! 2 ⌦ : X(!) = 1} #⌦ = 2 16 P(X = 2) = PX(C) = #{! 2 ⌦ : X(!) = 2} #⌦ = 4 16 P(X = 3) = PX(D) = #{! 2 ⌦ : X(!) = 3} #⌦ = 3 16
  • 47. {x1, x2, · · · , xk} P(X = xi) = f(xi) F(x) = P(X  x)
  • 48. P(x < X  x + x) x + xx x x ! 0 f(x) = lim x!0 P(x < X  x + x) x
  • 49. x + xx f(x) F(x) = P(X  x) = Z x 1 f(u)du f(a < x < b) = Z b a f(x)dx
  • 51.
  • 52. P(X = x) = px (1 p)1 x (x = 0, 1)
  • 53. # # p = 0.7 trial_size = 10000 set.seed(71) # data <- rbern(trial_size, p) # dens <- data.frame(y=c((1-p),p)*trial_size, x=c(0, 1)) # ggplot() + layer(data=data.frame(x=data), mapping=aes(x=x), geom="bar", stat="bin", bandwidth=0.1 ) + layer(data=dens, mapping=aes(x=x, y=y), geom="bar", stat="identity", width=0.05, fill="#777799", alpha=0.7)
  • 54.
  • 55. (x = 0, 1, · · · , n)
  • 56.
  • 57. # p = 0.7 trial_size = 10000 sample_size = 30 set.seed(71) # gen_binom_var <- function() { return(sum(rbern(sample_size, p))) } result <- rdply(trial_size, gen_binom_var()) # dens <- data.frame(y=dbinom(seq(sample_size), sample_size, 0.7))*trial_size # ggplot() + layer(data=resuylt, mapping=aes(x=V1), geom="bar", stat = "bin", binwidth=1, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=seq(sample_size)+.5, y=y), geom="line", stat="identity", position="identity",colour="red" ) + ggtitle("Bernoulli to Binomial.")
  • 58.
  • 59.
  • 60. P(X = x) = e x x!
  • 61.
  • 62. trial_size = 5000; width <- 1; # p = 0.7; n = 10; np <- p*n # n!∞ p!0 np= n = 100000; p <- np/n # gen_binom_var <- function() { return(sum(rbern(n, p))) } result <- rdply(trial_size, gen_binom_var()) # dens <- data.frame(y=dpois(seq(20), np))*trial_size # ggplot() + layer(data=result, mapping=aes(x=V1), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=seq(20)+.5, y=y), geom="line", stat="identity", position="identity", colour="red" ) + ggtitle("Bernoulli to Poisson.")
  • 63.
  • 64. f(x) = 1 p 2⇡ 2 exp ⇢ 1 2 (x µ)2 2 ( 1 < x < 1)
  • 65.
  • 66.
  • 67. # n <- 10000; p <- 0.7; trial_size = 10000 width=10 # gen_binom_var <- function() { return(sum(rbern(n, p))) } result <- rdply(trial_size, gen_binom_var()) # dens <- data.frame(y=dnorm(seq(6800,7200), mean=n*p, sd=sqrt(n*p*(1-p)))*trial_size*width) # ggplot() + layer(data=result, mapping=aes(x=V1), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=seq(6800,7200), y=y), geom="line", stat="identity", position="identity", colour="red") + ggtitle("Bernoulli to Normal.")
  • 68.
  • 69. ( 1 < x < 1) f(x) = 1 p 2⇡ exp ⇢ 1 2 x2
  • 70.
  • 71. # n <- 10000; p <- 0.7 trial_size = 30000 width=0.18 # gen_binom_var <- function() { return(sum(rbern(n, p))) } result <- rdply(trial_size, gen_binom_var()) m <- mean(result$V1); sd <- sd(result$V1); result <- (result - m)/sd # dens <- data.frame(y=dnorm(seq(-4,4,0.05), mean=0, sd=1)*trial_size*width) # ggplot() + layer(data=result, mapping=aes(x=V1), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=seq(-4,4,0.05), y=y), geom="line", stat="identity", position=“identity", colour="red" ) + ggtitle("Bernoulli to Standard Normal.")
  • 72.
  • 73. f(x, k) = (1/2)k/2 (k/2) xk/2 1 e x/2 (0  x) Xi Z = X2 1 + · · · + X2 k
  • 74.
  • 75. # p <- 0.7; n <- 1000; trial_size <- 100000; width <- 0.3; df <- 3 # (3 ) gen_binom_var <- function() { return(sum(rbern(n, p))) } gen_chisq_var <- function() { result <- rdply(trial_size, gen_binom_var()) return(((result$V1 - mean(result$V1))/sd(result$V1))**2) } # df result <- rlply(df, gen_chisq_var(),.progress = "text") res <- data.frame(x=result[[1]] + result[[2]] + result[[3]]) # ( =3) xx <- seq(0,20,0.1) dens <- data.frame(y=dchisq(x=xx, df=df)*trial_size*width) # ggplot() + layer(data=data, mapping=aes(x=x), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=xx, y=y), geom="line", stat="identity", position="identity", colour="blue" ) + ggtitle("Bernoulli to Chisquare")
  • 76.
  • 77.
  • 78. f(x, ) = ⇢ e x (x 0) 0 (x < 0)
  • 79.
  • 80. trial_size = 7000; width <- .01; # p = 0.7; n = 10; np <- p*n; # n!∞ p!0 np= n = 10000; p <- np/n # gen_exp_var <- function() { cnt <- 0 while (TRUE) { cnt <- cnt + 1 if (rbern(1, p)==1){ return(cnt) # 1 } } } data <- data.frame(x=rdply(trial_size, gen_exp_var())/n) names(data) <- c("n", "x") # dens <- data.frame(y=dexp(seq(0, 1.5, 0.1), np)*trial_size*width) ggplot() + layer(data=data, mapping=aes(x=x), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=seq(0, 1.5, 0.1), y=y), geom="line", stat="identity", position="identity", colour="red" ) + ggtitle("Bernoulli to Exponential.")
  • 81.
  • 82. f(x, ↵, ) = ↵ (↵) x↵ 1 exp( x) (0  x < 1) ↵X i=1 Xi ⇠ (↵, )Xi ⇠ Exp( )
  • 83.
  • 84. trial_size = 7000; width <- .035; # p = 0.7; n = 10; np <- p*n; # n!∞ p!0 np= n = 10000; p <- np/n; alpha <- 5 # get_interval <- function(){ cnt <- 0 while (TRUE) { cnt <- cnt + 1 if (rbern(1, p)==1){ return(cnt) } } } gen_exp_var <- function() { data <- data.frame(x=rdply(trial_size, get_interval())/n) names(data) <- c("n", "x") return(data) } result <- rlply(alpha, gen_exp_var()) data <- data.frame(x=result[[1]]$x + result[[2]]$x + result[[3]]$x + result[[4]]$x + result[[5]]$x) # dens <- data.frame(y=dgamma(seq(0, 3,.01), shape=alpha, rate=np)*trial_size*width) ggplot() + layer(data=data, mapping=aes(x=x), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=seq(0,3,.01), y=y), geom="line", stat="identity", position="identity", colour="red" ) + ggtitle("Bernoulli to Gamma")
  • 85.
  • 86. f(x, ↵, ) = ↵ (↵) x (↵+1) exp ✓ x ◆ (0  x < 1) Xi ⇠ Exp( ) Z = ↵X i=1 Xi ⇠ (↵, ) 1/Z ⇠ IG(↵, )
  • 87.
  • 88. trial_size = 7000; width <- .; # p = 0.7; n = 10; np <- p*n; # n!∞ p!0 np= n = 10000; p <- np/n; alpha <- 5 # get_interval <- function(){ cnt <- 0 while (TRUE) { cnt <- cnt + 1 if (rbern(1, p)==1){ return(cnt) } } } gen_exp_var <- function() { data <- data.frame(x=rdply(trial_size, get_interval())/n) names(data) <- c("n", "x") return(data) } result <- rlply(alpha, gen_exp_var()) data <- data.frame(x=1/(result[[1]]$x + result[[2]]$x + result[[3]]$x + result[[4]]$x + result[[5]]$x)) # dens <- data.frame(y=dinvgamma(seq(0, 23,.01), shape=5, rate=1/np)*trial_size*width) ggplot() + layer(data=data, mapping=aes(x=x), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=seq(0,3,.01), y=y), geom="line", stat="identity", position="identity", colour="red" ) + ggtitle("Bernoulli to Inversegamma")
  • 89.
  • 90. f(x) = ⇢ 1 (0  x  1) 0 (otherwise)
  • 91. Z = x1(1/2)1 + x2(1/2)2 + · · · + xq(1/2)q
  • 92. width <- 0.02 p <- 0.5; sample_size <- 1000 trial_size <- 100000 gen_unif_rand <- function() { # sample_size 2 # return (sum(rbern(sample_size, p) * (rep(1/2, sample_size) ** seq(sample_size)))) } gen_rand <- function(){ return( rdply(trial_size, gen_unif_rand()) ) } system.time(res <- gen_rand()) ggplot() + layer(data=res, mapping=aes(x=V1), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + ggtitle("Bernoulli to Standard Uniform")
  • 93.
  • 94. f(x, a, b) = ⇢ (b a) 1 (a  x  b) 0 (otherwise)
  • 95.
  • 96. a <- 5 b <- 8; width <- 0.05 p <- 0.5 sample_size <- 1000 trial_size <- 500000 gen_unif_rand <- function() { # sample_size 2 # return (sum(rbern(sample_size, p) * (rep(1/2, sample_size) ** seq(sample_size)))) } gen_rand <- function(){ return( rdply(trial_size, gen_unif_rand()) ) } system.time(res <- gen_rand()) res$V1 <- res$V1 * (b-a) + a ggplot() + layer(data=res, mapping=aes(x=V1), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + ggtitle("Bernoulli to Uniform") + xlim(4,9)
  • 97.
  • 98. f(x, ↵, ) = 1 B(↵, ) x↵ 1 (1 x) 1 (0 < x < 1) Xi ⇠ U(0, 1)iid (i = 1, 2, · · · , ↵ + 1)
  • 99.
  • 100. width <- 0.03; p <- 0.5 digits_length <- 30; set_size <- 3 trial_size <- 30000 gen_unif_rand <- function() { # digits_length 2 # return (sum(rbern(digits_length, p) * (rep(1/2, digits_length) ** seq(digits_length)))) } gen_rand <- function(){ return( rdply(set_size, gen_unif_rand())$V1 ) } unif_dataset <- rlply(trial_size, gen_rand, .progress='text') p <- ceiling(set_size * 0.5); q <- set_size - p + 1 get_nth_data <- function(a){ return(a[order(a)][p]) } disp_data <- data.frame(lapply(unif_dataset, get_nth_data)) names(disp_data) <- seq(length(disp_data)); disp_data <- data.frame(t(disp_data)) names(disp_data) <- "V1" x_range <- seq(0, 1, 0.001) dens <- data.frame(y=dbeta(x_range, p, q)*trial_size*width) ggplot() + layer(data=disp_data, mapping=aes(x=V1), geom="bar", stat = "bin", binwidth=width, fill="#6666ee", color="gray" ) + layer(data=dens, mapping=aes(x=x_range, y=y), geom="line", stat="identity", position="identity", colour="red" ) + ggtitle("Bernoulli to Beta")
  • 101.
  • 102. E[X] = X(\text{lose})\,P(\text{lose}) + X(\text{win})\,P(\text{win}) = 0 \times 0.8 + 1{,}000{,}000 \times 0.2 = 200{,}000, \qquad E[X] = \sum_x x\, p(x) = \mu
  • 103. \binom{n}{x} = \frac{n!}{(n-x)!\,x!}
E[X] = \sum_{x=0}^{n} x P(x) = \sum_{x=0}^{n} x \binom{n}{x} p^x (1-p)^{n-x} = \sum_{x=0}^{n} \frac{n!}{(n-x)!\,(x-1)!} p^x (1-p)^{n-x}
= np \sum_{x=1}^{n} \binom{n-1}{x-1} p^{x-1} (1-p)^{(n-1)-(x-1)} = np
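The identity E[X] = np is easy to sanity-check by simulation. A minimal R sketch (using base R's rbinom; the number of replications is an arbitrary choice, not from the slides):
set.seed(1)
n <- 10; p <- 0.7
x <- rbinom(100000, size = n, prob = p)  # 100000 binomial draws
mean(x)   # simulated mean, close to n * p = 7
n * p     # theoretical expectation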
  • 104. \mathrm{Var}[X] = E[(X - E[X])^2] = \sum_x (x - E[X])^2 P(x) = \sigma^2, \qquad E[X] = \mu
  • 105. \mathrm{Var}[X] = E[(X - E[X])^2] = \int_{-\infty}^{\infty} (x - E[X])^2 f(x)\,dx = \sigma^2, \qquad E[X] = \int_{-\infty}^{\infty} x f(x)\,dx = \mu
  • 106. E[g(X)] = \int_{-\infty}^{\infty} g(x) f(x)\,dx, \qquad g(X) = (X - E[X])^2, \qquad E[\,\cdot\,] = \int_{-\infty}^{\infty} \cdot\, f(x)\,dx
  • 107. g(x) = x^k: \qquad E[g(X)] = E[X^k] = \int_{-\infty}^{\infty} x^k f(x)\,dx = \mu'_k
  • 108. g(x) = (x - E[X])^k: \qquad E[g(X)] = E[(X - E[X])^k] = \int_{-\infty}^{\infty} (x - E[X])^k f(x)\,dx = \mu_k
  • 109. E[cX] = cE[X] \quad \because\ E[cX] = \int_{-\infty}^{\infty} cx\, f(x)\,dx = c \int_{-\infty}^{\infty} x f(x)\,dx = cE[X]
  • 110. \mathrm{Var}[cX] = c^2 \mathrm{Var}[X] \quad \because\ \mathrm{Var}[cX] = \int_{-\infty}^{\infty} (cx - E[cX])^2 f(x)\,dx = \int_{-\infty}^{\infty} (cx - c\mu)^2 f(x)\,dx = c^2 \int_{-\infty}^{\infty} (x - \mu)^2 f(x)\,dx = c^2 \mathrm{Var}[X]
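Both scaling rules can be checked numerically. A minimal R sketch (the normal draws and the constant c = 3 are arbitrary illustrative choices):
set.seed(1)
c <- 3
x <- rnorm(200000, mean = 2, sd = 1.5)
c(mean(c * x), c * mean(x))        # E[cX] vs c * E[X]
c(var(c * x), c^2 * var(x))        # Var[cX] vs c^2 * Var[X]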
  • 111.
  • 112. f(x, y) = \lim_{\Delta x, \Delta y \to 0} \frac{P(x < X \le x + \Delta x,\ y < Y \le y + \Delta y)}{\Delta x\, \Delta y}
  • 113. g(x) = \int_{-\infty}^{\infty} f(x, y)\,dy, \qquad h(y) = \int_{-\infty}^{\infty} f(x, y)\,dx \quad (\text{marginal densities } g(x),\ h(y))
  • 114. E_{X,Y}[g(X, Y)] = \int_{-\infty}^{\infty}\!\int_{-\infty}^{\infty} g(x, y) f(x, y)\,dx\,dy; \quad \text{e.g. } g(x, y) = x^{0.8} y^{0.8},\ (x, y) \sim N((4, 4), S),\ S = \begin{pmatrix} 1 & 0.5 \\ 0.4 & 1 \end{pmatrix},\ E_{X,Y}[g(X, Y)] = 8.02
  • 115. g(X, Y) = (X - \mu_X)(Y - \mu_Y), \qquad \mathrm{Cov}[X, Y] = E[(X - \mu_X)(Y - \mu_Y)]
  • 116. \mathrm{Cov}[X, Y] = E[(X - \mu_X)(Y - \mu_Y)], \quad (x, y) \sim N((4, 4), S): \quad S_1 = \begin{pmatrix} 1 & 0.8 \\ 0.8 & 1 \end{pmatrix},\ S_2 = \begin{pmatrix} 1 & -0.8 \\ -0.8 & 1 \end{pmatrix},\ S_3 = \begin{pmatrix} 1 & 0 \\ 0 & 1 \end{pmatrix},\ S_4 = \begin{pmatrix} 1 & 0.999 \\ 0.999 & 1 \end{pmatrix}
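The covariance definition can be verified empirically on bivariate normal draws. A minimal R sketch (using MASS::mvrnorm; the mean vector (4, 4) and the correlations follow the slide, while the sample size is an arbitrary choice):
library(MASS)
set.seed(1)
mu <- c(4, 4)
for (rho in c(0.8, -0.8, 0, 0.999)) {
  S  <- matrix(c(1, rho, rho, 1), nrow = 2)
  xy <- mvrnorm(100000, mu = mu, Sigma = S)
  # empirical E[(X - mu_X)(Y - mu_Y)] vs the rho used to generate the data
  cat(sprintf("rho = %6.3f  empirical Cov = %6.3f\n",
              rho, mean((xy[, 1] - mean(xy[, 1])) * (xy[, 2] - mean(xy[, 2])))))
}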
  • 117. f(x, y) = g(x)\,h(y)
  • 118. f(x, y) = g(x)\,h(y) \ \Rightarrow\ \mathrm{Cov}[X, Y] = 0
  • 119. (x_1, x_2, \cdots, x_n): \quad f(x_1) = \int \cdots \int f(x_1, \cdots, x_n)\,dx_2 \cdots dx_n \ \ (\text{marginal density of } x_1), \qquad f(x_1, \cdots, x_n) = f(x_1) \cdots f(x_n) \ \ (x_1, \cdots, x_n \text{ independent})
  • 120. \text{For independent } x_1, \cdots, x_n \text{ and functions } g_1(x_1), \cdots, g_n(x_n): \quad E\!\left[\prod_{i=1}^{n} g_i(x_i)\right] = \prod_{i=1}^{n} E[g_i(x_i)]
\because\ E\!\left[\prod_{i=1}^{n} g_i(x_i)\right] = \int_{-\infty}^{\infty} \cdots \int_{-\infty}^{\infty} g_1(x_1) \cdots g_n(x_n) f(x_1, \cdots, x_n)\,dx_1 \cdots dx_n = \int_{-\infty}^{\infty} g_1(x_1) f(x_1)\,dx_1 \cdots \int_{-\infty}^{\infty} g_n(x_n) f(x_n)\,dx_n = \prod_{i=1}^{n} E[g_i(x_i)]
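A quick empirical check of E[∏ g_i(x_i)] = ∏ E[g_i(x_i)] for independent draws. A minimal R sketch (two independent normals and g_1(x) = x², g_2(x) = e^x are arbitrary illustrative choices):
set.seed(1)
x1 <- rnorm(500000, mean = 1, sd = 0.5)
x2 <- rnorm(500000, mean = 0, sd = 0.5)
c(mean(x1^2 * exp(x2)),          # E[g1(x1) g2(x2)]
  mean(x1^2) * mean(exp(x2)))    # E[g1(x1)] * E[g2(x2)], nearly identical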
  • 121. \text{For independent } x_1, \cdots, x_n \text{ with } E[x_i] = \mu_i,\ \mathrm{Var}[x_i] = \sigma_i^2\ (i = 1, 2, \cdots, n) \text{ and constants } c = (c_1, \cdots, c_n): \quad E[c_1 x_1 + \cdots + c_n x_n] = c_1 \mu_1 + \cdots + c_n \mu_n, \qquad \mathrm{Var}[c_1 x_1 + \cdots + c_n x_n] = c_1^2 \sigma_1^2 + \cdots + c_n^2 \sigma_n^2
  • 122. E[c_1 x_1 + \cdots + c_n x_n] = \int_{-\infty}^{\infty} \cdots \int_{-\infty}^{\infty} (c_1 x_1 + \cdots + c_n x_n) f(x_1, \cdots, x_n)\,dx_1 \cdots dx_n = c_1 \int_{-\infty}^{\infty} x_1 f(x_1)\,dx_1 + \cdots + c_n \int_{-\infty}^{\infty} x_n f(x_n)\,dx_n = c_1 \mu_1 + \cdots + c_n \mu_n
  • 123. \mathrm{Var}[c_1 x_1 + \cdots + c_n x_n] = E[\{(c_1 x_1 + \cdots + c_n x_n) - E[c_1 x_1 + \cdots + c_n x_n]\}^2] = E[\{c_1(x_1 - \mu_1) + \cdots + c_n(x_n - \mu_n)\}^2]
= E\!\left[\sum_{i=1}^{n} c_i^2 (x_i - \mu_i)^2 + \sum_{i \ne j} c_i c_j (x_i - \mu_i)(x_j - \mu_j)\right] = \sum_{i=1}^{n} c_i^2 E[(x_i - \mu_i)^2] + \sum_{i \ne j} c_i c_j E[(x_i - \mu_i)(x_j - \mu_j)] = c_1^2 \sigma_1^2 + \cdots + c_n^2 \sigma_n^2
\quad (\because\ E[(x_i - \mu_i)^2] = \sigma_i^2,\ \ E[(x_i - \mu_i)(x_j - \mu_j)] = E[x_i - \mu_i]\,E[x_j - \mu_j] = 0 \text{ for } i \ne j)
  • 124.
  • 125. x_1, \cdots, x_n \text{ iid; each } x_i \text{ has mean } \mu \text{ and variance } \sigma^2, \text{ written } x_i \sim (\mu, \sigma^2)
  • 126. T = x_1 + \cdots + x_n: \quad E[T] = E[x_1] + \cdots + E[x_n] = n\mu, \quad \mathrm{Var}[T] = \mathrm{Var}[x_1] + \cdots + \mathrm{Var}[x_n] = n\sigma^2 \quad (\sigma_1^2 = \cdots = \sigma_n^2 = \sigma^2,\ c_1 = \cdots = c_n = 1 \text{ in } \mathrm{Var}[c_1 x_1 + \cdots + c_n x_n] = c_1^2 \sigma_1^2 + \cdots + c_n^2 \sigma_n^2)
  • 127. \bar{x} = \frac{1}{n}\sum_{i=1}^{n} x_i = \frac{1}{n} T: \quad E[\bar{x}] = \frac{1}{n} E[T] = \frac{1}{n} \cdot n\mu = \mu, \qquad \mathrm{Var}[\bar{x}] = \mathrm{Var}\!\left[\frac{1}{n} T\right] = \frac{1}{n^2}\mathrm{Var}[T] = \frac{\sigma^2}{n}
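The σ²/n law for the sample mean can be confirmed by simulation. A minimal R sketch (exponential samples with rate 1, so μ = σ² = 1, are an arbitrary choice; any iid distribution with finite variance works):
set.seed(1)
n <- 25
xbar <- replicate(50000, mean(rexp(n, rate = 1)))  # many sample means of size n
c(mean(xbar), 1)        # E[x-bar] vs mu = 1
c(var(xbar), 1 / n)     # Var[x-bar] vs sigma^2 / n = 1/25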
  • 130.
  • 131. \text{For any distribution with mean } \mu \text{ and variance } \sigma^2: \quad P(|X - \mu| > k\sigma) \le \frac{1}{k^2}. \qquad k = 1 \Rightarrow P(|X - \mu| > \sigma) \le 1, \quad k = 2 \Rightarrow P(|X - \mu| > 2\sigma) \le 1/4, \quad k = 3 \Rightarrow P(|X - \mu| > 3\sigma) \le 1/9
  • 132. \sigma^2 = \int_{-\infty}^{\infty} (x - \mu)^2 f(x)\,dx = \int_{I_1} (x - \mu)^2 f(x)\,dx + \int_{I_2} (x - \mu)^2 f(x)\,dx + \int_{I_3} (x - \mu)^2 f(x)\,dx, \quad I_1 = (-\infty, \mu - k\sigma),\ I_2 = [\mu - k\sigma, \mu + k\sigma],\ I_3 = (\mu + k\sigma, \infty)
\sigma^2 \ge \int_{I_1} (x - \mu)^2 f(x)\,dx + \int_{I_3} (x - \mu)^2 f(x)\,dx \ge \int_{I_1} k^2\sigma^2 f(x)\,dx + \int_{I_3} k^2\sigma^2 f(x)\,dx = k^2\sigma^2 \left[P(x \in I_1) + P(x \in I_3)\right] = k^2\sigma^2\, P(|X - \mu| > k\sigma) \ \Rightarrow\ P(|X - \mu| > k\sigma) \le \frac{1}{k^2}
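The bound is easy to compare with an actual distribution. A minimal R sketch (exponential with rate 1, so μ = σ = 1; the distribution choice is arbitrary):
set.seed(1)
x <- rexp(1e6, rate = 1)   # mu = 1, sigma = 1
for (k in 1:3) {
  emp <- mean(abs(x - 1) > k * 1)   # empirical P(|X - mu| > k * sigma)
  cat(sprintf("k = %d  empirical = %.4f  Chebyshev bound = %.4f\n", k, emp, 1 / k^2))
}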
  • 133. \text{For iid } x_1, \cdots, x_n \text{ with mean } \mu \text{ and variance } \sigma^2 \text{ and any } \varepsilon > 0: \quad \lim_{n \to \infty} P\{|\bar{x}_n - \mu| \ge \varepsilon\} = 0, \qquad \bar{x}_n = \frac{1}{n}\sum_{i=1}^{n} x_i, \qquad \bar{x}_n \to \mu \ \text{in } P
  • 134. \text{For any } \varepsilon > 0: \quad P(|\bar{x}_n - \mu| > \varepsilon) = P\!\left(|\bar{x}_n - \mu| > \frac{\varepsilon\sqrt{n}}{\sigma} \cdot \frac{\sigma}{\sqrt{n}}\right) \le \frac{\sigma^2}{\varepsilon^2 n} \to 0 \quad (\text{Chebyshev with } k = \varepsilon\sqrt{n}/\sigma \text{ and } \sigma_{\bar{x}} = \sigma/\sqrt{n})
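The convergence x̄_n → μ can be visualized with a running mean. A minimal base-R sketch (uniform draws on [0, 1], so μ = 0.5; the plotting choices are arbitrary):
set.seed(1)
x <- runif(10000)                         # iid draws with mu = 0.5
running_mean <- cumsum(x) / seq_along(x)  # x-bar_n for n = 1, 2, ...
plot(running_mean, type = "l", xlab = "n", ylab = "running mean")
abline(h = 0.5, col = "red")              # the true mean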
  • 135.
  • 136.
  • 137. f(x) = \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{(x - \mu)^2}{2\sigma^2}\right) \quad (-\infty < x < \infty), \qquad f(x) = \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{x^2}{2}\right) \quad (-\infty < x < \infty)
  • 139. f(x) = x^2, \quad f(y) = y^2
  • 140. f(y) = \exp(-y^2)
  • 141. z = \sqrt{2}\, y, \quad f(z) = \exp\left(-\frac{1}{2}z^2\right)
  • 142. \int_{-\infty}^{\infty} e^{-y^2}\,dy = \sqrt{\pi}, \quad dz = \sqrt{2}\,dy \ \Rightarrow\ \int_{-\infty}^{\infty} \exp\left(-\frac{z^2}{2}\right)dz = \sqrt{2\pi}, \quad \int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{z^2}{2}\right)dz = 1
  • 144. z = \frac{x - \mu}{\sigma}, \quad \frac{dz}{dx} = \frac{1}{\sigma} \ \Rightarrow\ \int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{(x - \mu)^2}{2\sigma^2}\right)dx = 1
  • 145.
  • 146. \text{For } D = (x_1, \cdots, x_n) \text{ iid with mean } \mu \text{ and variance } \sigma^2: \quad \frac{\bar{x} - \mu}{\sigma/\sqrt{n}} \to N(0, 1) \quad (n \to \infty). \qquad \text{Example: } \lambda = 0.1,\ \mu = \frac{1}{\lambda} = 10,\ \sigma^2 = \frac{1}{\lambda^2} = 100,\ n = 10000: \quad \sigma_{\bar{x}} = \frac{\sigma}{\sqrt{n}} = \sqrt{\frac{1}{\lambda^2 n}} = \sqrt{\frac{1}{0.01 \times 10000}} = \sqrt{\frac{1}{100}} = \frac{1}{10}
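This example can be reproduced by simulation: means of n = 10000 exponential draws with rate λ = 0.1 should be approximately N(10, (1/10)²). A minimal base-R sketch (the number of replications is arbitrary):
set.seed(1)
lambda <- 0.1; n <- 10000
xbar <- replicate(2000, mean(rexp(n, rate = lambda)))  # 2000 sample means
c(mean(xbar), 1 / lambda)                  # close to mu = 10
c(sd(xbar), sqrt(1 / (lambda^2 * n)))      # close to sigma / sqrt(n) = 0.1
hist(xbar, breaks = 50, main = "CLT: means of exponential samples")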
  • 147. g(x) = e^{xt}: \quad M_X(t) = E[e^{xt}] = \int_{-\infty}^{\infty} e^{xt} f(x)\,dx. \quad \text{If } M_X(t) = M_Y(t) \text{ in a neighborhood of } t = 0, \text{ then } X \text{ and } Y \text{ have the same distribution.}
  • 148. e^{xt} = 1 + xt + \frac{t^2}{2!}x^2 + \cdots + \frac{t^k}{k!}x^k + \cdots \ \Rightarrow\ M_X(t) = E[e^{xt}] = 1 + tE[X] + \frac{t^2}{2!}E[X^2] + \cdots + \frac{t^k}{k!}E[X^k] + \cdots = 1 + t\mu'_1 + \frac{t^2}{2!}\mu'_2 + \cdots + \frac{t^k}{k!}\mu'_k + \cdots
  • 149. \frac{d^k}{dt^k} M_X(t) = E[x^k e^{xt}] \ \Rightarrow\ \frac{d^k}{dt^k} M_X(0) = E[x^k] = \mu'_k
  • 150. x \sim N(\mu, \sigma^2): \quad M_X(t) = E[e^{xt}] = \int_{-\infty}^{\infty} e^{xt}\, \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{1}{2}\frac{(x - \mu)^2}{\sigma^2}\right)dx, \qquad z = \frac{x - \mu}{\sigma},\ x = \mu + \sigma z,\ dx = \sigma\,dz
  • 151. M_X(t) = \int_{-\infty}^{\infty} e^{(\mu + \sigma z)t}\, \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{1}{2}z^2\right)dz = e^{\mu t}\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(\sigma t z - \frac{1}{2}z^2\right)dz = e^{\mu t}\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{1}{2}[z^2 - 2\sigma t z + \sigma^2 t^2 - \sigma^2 t^2]\right)dz
= e^{\mu t} e^{\frac{\sigma^2 t^2}{2}}\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{1}{2}(z - \sigma t)^2\right)dz; \quad w = z - \sigma t,\ dz = dw \ \Rightarrow\ M_X(t) = e^{\mu t} e^{\frac{\sigma^2 t^2}{2}}\int_{-\infty}^{\infty} \frac{1}{\sqrt{2\pi}} \exp\left(-\frac{w^2}{2}\right)dw = e^{\mu t + \frac{\sigma^2 t^2}{2}}
  • 152. (f \cdot g)' = f' \cdot g + f \cdot g', \quad (f \circ g)'(x) = f'(g(x))\,g'(x): \quad M'_X(t) = (\mu + \sigma^2 t)\,e^{\mu t + \frac{\sigma^2 t^2}{2}}, \qquad M''_X(t) = (\mu + \sigma^2 t)^2\, e^{\mu t + \frac{\sigma^2 t^2}{2}} + \sigma^2\, e^{\mu t + \frac{\sigma^2 t^2}{2}} = e^{\mu t + \frac{\sigma^2 t^2}{2}}\{(\mu + \sigma^2 t)^2 + \sigma^2\}
  • 153. E[x] = M'_X(0) = (\mu + \sigma^2 \cdot 0)\,e^{0} = \mu, \quad E[x^2] = M''_X(0) = e^{0}\{(\mu + \sigma^2 \cdot 0)^2 + \sigma^2\} = \mu^2 + \sigma^2, \quad \mathrm{Var}[x] = E[(x - E[x])^2] = E[x^2] - (E[x])^2 = (\mu^2 + \sigma^2) - \mu^2 = \sigma^2
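The relation E[Xᵏ] = M_X⁽ᵏ⁾(0) can be spot-checked with finite differences. A minimal R sketch for N(μ, σ²) (μ = 2, σ = 1.5 and the step size h are arbitrary choices):
mu <- 2; sigma <- 1.5; h <- 1e-4
M <- function(t) exp(mu * t + sigma^2 * t^2 / 2)    # MGF of N(mu, sigma^2)
d1 <- (M(h) - M(-h)) / (2 * h)                      # numeric M'(0),  should be ~ mu
d2 <- (M(h) - 2 * M(0) + M(-h)) / h^2               # numeric M''(0), should be ~ mu^2 + sigma^2
c(d1, mu)
c(d2, mu^2 + sigma^2)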
  • 154. \text{For } D = (x_1, \cdots, x_n) \text{ iid with mean } \mu \text{ and variance } \sigma^2: \quad \frac{\bar{x} - \mu}{\sigma/\sqrt{n}} \to N(0, 1)\ (n \to \infty). \qquad T = x_1 + \cdots + x_n, \qquad T' = \frac{T - n\mu}{\sqrt{n}} = \frac{\bar{x} - \mu}{1/\sqrt{n}}
  • 155. \text{If } M_X(t) = M_Y(t) \text{ in a neighborhood of } t = 0, X \text{ and } Y \text{ have the same distribution; we show } T' = \frac{T - n\mu}{\sqrt{n}} \to N(0, \sigma^2).
  • 156. M_{x_i}(t) = 1 + \mu'_1 t + \mu'_2 \frac{t^2}{2!} + \mu'_3 \frac{t^3}{3!} + \cdots, \qquad M_{x_i - \mu}(t) = 1 + \mu_1 t + \mu_2 \frac{t^2}{2!} + \mu_3 \frac{t^3}{3!} + \cdots = 1 + 0 + \sigma^2 \frac{t^2}{2!} + \mu_3 \frac{t^3}{3!} + \cdots
  • 157. M_{\frac{x_i - \mu}{\sqrt{n}}}(t) = E\!\left[e^{\frac{x_i - \mu}{\sqrt{n}}t}\right] = 1 + \frac{\sigma^2 t^2}{2!\,n} + \frac{\mu_3 t^3}{3!\,n^{3/2}} + \cdots + \frac{\mu_k t^k}{k!\,n^{k/2}} + \cdots = 1 + \frac{\sigma^2 t^2 + \epsilon_n}{2n}, \qquad \epsilon_n \to 0 \ (n \to \infty)
  • 158. T' = \frac{x_1 - \mu}{\sqrt{n}} + \frac{x_2 - \mu}{\sqrt{n}} + \cdots + \frac{x_n - \mu}{\sqrt{n}} = \sum_{i=1}^{n}\frac{x_i - \mu}{\sqrt{n}}: \quad M_{T'}(t) = E\!\left[e^{\sum_{i=1}^{n}\frac{x_i - \mu}{\sqrt{n}}t}\right] = \prod_{i=1}^{n} E\!\left[e^{\frac{x_i - \mu}{\sqrt{n}}t}\right] = \left(1 + \frac{1}{n}\cdot\frac{\sigma^2 t^2 + \epsilon_n}{2}\right)^n, \qquad e^r \equiv \lim_{n \to \infty}\left(1 + \frac{r}{n}\right)^n
  • 159. \lim_{n \to \infty} M_{T'}(t) = \lim_{n \to \infty}\left(1 + \frac{1}{n}\cdot\frac{\sigma^2 t^2 + \epsilon_n}{2}\right)^n = e^{\frac{\sigma^2 t^2}{2}} \quad (\because \lim_{n \to \infty}\epsilon_n = 0), \quad \text{the MGF of } N(0, \sigma^2), \text{ so } T' = \frac{T - n\mu}{\sqrt{n}} \to N(0, \sigma^2)
  • 160. import numpy as np
import scipy.stats as st

n = 100000
sample_size = 1000
rvs_list = []
m_list = []
for i in range(n):
    unif_rvs = st.uniform.rvs(4.5, size=sample_size)       # mean 5
    beta_rvs = st.beta.rvs(a=3, b=3, size=sample_size)      # mean 0.5 (Beta)
    gamma_rvs = st.gamma.rvs(a=3, size=sample_size)         # mean 3
    chi2_rvs = st.chi2.rvs(df=5, size=sample_size)          # mean 5 (chi-square)
    exp_rvs = st.expon.rvs(loc=0, size=sample_size)         # mean 1
    rvs = np.array([unif_rvs, beta_rvs, gamma_rvs, chi2_rvs, exp_rvs]).flatten()
    m_list.append(np.mean(rvs))
    rvs_list.append(rvs)
  • 161. # parallel version
import numpy as np
import numpy.random as rd
import scipy.stats as st
from datetime import datetime as dt

n = 10000
sample_size = 1000
rvs_list = []
m_list = []
# randomly chosen parameters for each of the sample_size component distributions
m_unif = st.uniform.rvs(4, 2, size=sample_size)
m_beta_a = st.uniform.rvs(4, 2, size=sample_size)
m_beta_b = st.uniform.rvs(4, 2, size=sample_size)
m_gamma = rd.randint(2, 5, size=sample_size)
m_chi2_df = rd.randint(3, 6, size=sample_size)
m_exp = st.uniform.rvs(4, 2, size=sample_size)

def gen_random_state():
    return int(dt.now().timestamp() * 10**6) - 1492914610000000 + rd.randint(0, 1000000)

def create_rvs(n):
    # rd.seed = int(dt.now().timestamp() * 10**6) - 1492914610000000 + rd.randint(0, 1000000)
    print("[START]")
    for _ in range(n):
        unif_rvs = [st.uniform.rvs(m, size=1, random_state=gen_random_state()) for m in m_unif]
        beta_rvs = [st.beta.rvs(a=a, b=b, size=1, random_state=gen_random_state())
                    for a, b in zip(m_beta_a, m_beta_b)]
        gamma_rvs = [st.gamma.rvs(a=a, size=1, random_state=gen_random_state()) for a in m_gamma]
        chi2_rvs = [st.chi2.rvs(df=d, size=1, random_state=gen_random_state()) for d in m_chi2_df]
        exp_rvs = [st.expon.rvs(loc=l, size=1, random_state=gen_random_state()) for l in m_exp]
        rvs = np.array([unif_rvs, beta_rvs, gamma_rvs, chi2_rvs, exp_rvs]).flatten()
        l_mean.append(np.mean(rvs))   # l_mean / l_rvs are the Manager lists created on the next slide
        l_rvs.append(rvs)
    print("[END]")
  • 162. from multiprocessing import Process, Manager
import time

n_jobs = 20
n_each = int(n / n_jobs)
jobs = [Process(target=create_rvs, args=(n_each,)) for _ in range(n_jobs)]
manager = Manager()
l_rvs = manager.list(range(len(jobs)))
l_mean = manager.list(range(len(jobs)))
start_time = time.time()
for j in jobs:
    j.start()
    time.sleep(0.2)
for j in jobs:
    j.join()
finish_time = time.time()
print(finish_time - start_time)
# drop the placeholder entries that pre-filled the Manager lists
m_list = l_mean[n_jobs:]
rvs_list = np.array(l_rvs[n_jobs:])
print(rvs_list.shape)
  • 163.
  • 164. D = (x1, · · · , xn)
  • 165. \theta_0 = \hat\theta(X_1, \cdots, X_n) \ \ (\text{point estimate}), \qquad \hat\theta_{lower}(X_1, \cdots, X_n) \le \theta_0 \le \hat\theta_{upper}(X_1, \cdots, X_n) \ \ (\text{interval estimate})
  • 168. E[(\hat\theta(X) - \theta)^2] = E[\{(E[\hat\theta(X)] - \theta) + (\hat\theta(X) - E[\hat\theta(X)])\}^2] = E[(E[\hat\theta(X)] - \theta)^2 + 2(E[\hat\theta(X)] - \theta)(\hat\theta(X) - E[\hat\theta(X)]) + (\hat\theta(X) - E[\hat\theta(X)])^2] = (E[\hat\theta(X)] - \theta)^2 + \mathrm{Var}[\hat\theta(X)]; \quad \text{if } E[\hat\theta(X)] = \theta \ (\text{unbiased}), \ E[(\hat\theta(X) - \theta)^2] = \mathrm{Var}[\hat\theta(X)]
  • 169. E[\bar{x}] = \frac{1}{n}E[T] = \frac{1}{n}\cdot n\mu = \mu \quad (\bar{x} \text{ is unbiased for } \mu), \qquad s^2 = \frac{1}{n-1}\sum_{i=1}^{n}(x_i - \bar{x})^2 \quad (\text{unbiased estimator of } \sigma^2)
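The n−1 divisor in s² can be compared with the biased 1/n version by simulation. A minimal base-R sketch (normal samples with σ² = 4 and n = 5 are arbitrary choices):
set.seed(1)
n <- 5; sigma2 <- 4
s2_unbiased <- replicate(100000, var(rnorm(n, sd = 2)))   # divisor n - 1
s2_biased   <- s2_unbiased * (n - 1) / n                   # divisor n
c(mean(s2_unbiased), mean(s2_biased), sigma2)  # roughly 4, 3.2, and the true value 4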
  • 170.
  • 171. \lim_{n \to \infty} P\{|\bar{x}_n - \mu| \ge \varepsilon\} = 0, \quad \bar{x}_n \to \mu \ \text{in } P; \qquad \text{an estimator } \hat\theta_n(X) \text{ is consistent if } \hat\theta_n(X) \to \theta \ \text{in } P \text{ as } n \to \infty \ (\text{e.g. } \bar{x}_n \text{ for } \mu)
  • 173.
  • 174.
  • 175. D = (x_1, \cdots, x_n), \text{ each } x_i \text{ with density } f(x_i): \text{ joint density } \prod_{i=1}^{n} f(x_i). \text{ Viewed as a function of the parameter with the } x_i \text{ fixed, } \prod_{i=1}^{n} f(x_i|\theta) \text{ is the likelihood: } \ell(\theta|x_1, x_2, \cdots, x_n) = \prod_{i=1}^{n} f(x_i|\theta)
  • 176. f(x_1, x_2, \cdots, x_{10}|\mu, \sigma^2) = \prod_{i=1}^{10} \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{1}{2}\frac{(x_i - \mu)^2}{\sigma^2}\right)
  • 177. \ell(\mu, \sigma^2|x_1, x_2, \cdots, x_{10}) = \prod_{i=1}^{10} \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{1}{2}\frac{(x_i - \mu)^2}{\sigma^2}\right)
  • 178.
  • 179. \theta^* = \arg\max_{\theta} \ell(\theta|x_1, x_2, \cdots, x_n), \qquad L(\theta|x_1, \cdots, x_n) \equiv \log \ell(\theta|x_1, \cdots, x_n)
  • 180.
  • 181. \ell(\mu, \sigma^2|x_1, \cdots, x_n) = \prod_{i=1}^{n} \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{1}{2}\frac{(x_i - \mu)^2}{\sigma^2}\right) \ \Rightarrow\ L(\mu, \sigma^2|x_1, \cdots, x_n) = -\frac{n}{2}\log(2\pi) - \frac{n}{2}\log \sigma^2 - \frac{1}{2\sigma^2}\sum_{i=1}^{n}(x_i - \mu)^2
\frac{\partial L}{\partial \mu} = \frac{1}{\sigma^2}\sum_{i=1}^{n}(x_i - \mu) = 0 \ \Rightarrow\ \sum_{i=1}^{n} x_i = n\mu \ \Rightarrow\ \mu^* = \frac{1}{n}\sum_{i=1}^{n} x_i
  • 182. \frac{\partial L}{\partial \sigma^2} = -\frac{n}{2}\frac{1}{\sigma^2} + \frac{1}{2(\sigma^2)^2}\sum_{i=1}^{n}(x_i - \mu)^2 = 0 \ \Rightarrow\ \frac{1}{2(\sigma^2)^2}\sum_{i=1}^{n}(x_i - \mu)^2 = \frac{n}{2\sigma^2} \ \Rightarrow\ \sigma^{2*} = \frac{1}{n}\sum_{i=1}^{n}(x_i - \mu)^2
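The closed-form ML estimates can be compared with a direct numerical maximization of the log-likelihood. A minimal base-R sketch using optim (the simulated data set is an arbitrary example):
set.seed(1)
x <- rnorm(200, mean = 3, sd = 2)
negloglik <- function(par) {          # par = c(mu, log sigma^2); the log keeps sigma^2 > 0
  mu <- par[1]; s2 <- exp(par[2])
  0.5 * length(x) * log(2 * pi * s2) + sum((x - mu)^2) / (2 * s2)
}
fit <- optim(c(0, 0), negloglik)
c(fit$par[1], mean(x))                         # mu* vs the sample mean
c(exp(fit$par[2]), mean((x - mean(x))^2))      # sigma^2* vs (1/n) sum (x_i - x-bar)^2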
  • 183.
  • 184. D = (x_1, \cdots, x_n); \ \mu \text{ and } \sigma^2 \text{ unknown; interval estimate for } \mu
  • 185. u \sim N(0, 1),\ v \sim \chi^2(m),\ t = \frac{u}{\sqrt{v/m}} \ \Rightarrow\ f(t) = \frac{\Gamma\!\left(\frac{m+1}{2}\right)}{\sqrt{m\pi}\,\Gamma\!\left(\frac{m}{2}\right)}\left(\frac{t^2}{m} + 1\right)^{-\frac{m+1}{2}}
  • 186. \text{For independent } u \sim N(0, 1)\ (-\infty < u < \infty) \text{ and } v \sim \chi^2(m)\ (v > 0): \quad f(u, v) = \frac{1}{\sqrt{2\pi}}\exp\left(-\frac{u^2}{2}\right)\cdot\frac{(1/2)^{m/2}}{\Gamma(m/2)}\,v^{m/2 - 1}e^{-v/2}; \quad \text{the change of variables } t = \frac{u}{\sqrt{v/m}},\ x = v \text{ gives } f(t) = \frac{\Gamma\!\left(\frac{m+1}{2}\right)}{\sqrt{m\pi}\,\Gamma\!\left(\frac{m}{2}\right)}\left(\frac{t^2}{m} + 1\right)^{-\frac{m+1}{2}}, \qquad \Gamma(z) = \int_0^{\infty} t^{z-1}e^{-t}\,dt
  • 187. \text{For } D = (x_1, \cdots, x_n),\ x_i \sim N(\mu, \sigma^2): \quad \bar{x} \sim N(\mu, \sigma^2/n), \qquad \frac{1}{\sigma^2}\sum_{i=1}^{n}(x_i - \bar{x})^2 \sim \chi^2_{n-1}
  • 188. u = \frac{\bar{x} - \mu}{\sigma/\sqrt{n}} \sim N(0, 1), \quad v = \frac{1}{\sigma^2}\sum_{i=1}^{n}(x_i - \bar{x})^2 \sim \chi^2_{n-1} \ \Rightarrow\ t = \frac{u}{\sqrt{v/(n-1)}} = \frac{\bar{x} - \mu}{\sigma/\sqrt{n}}\cdot\left[\frac{1}{\sigma^2}\frac{1}{n-1}\sum_{i=1}^{n}(x_i - \bar{x})^2\right]^{-1/2} = \frac{\bar{x} - \mu}{1/\sqrt{n}}\cdot\frac{1}{\sqrt{s^2}} = \frac{\bar{x} - \mu}{s/\sqrt{n}} \sim t_{n-1}, \qquad s^2 = \frac{1}{n-1}\sum_{i=1}^{n}(x_i - \bar{x})^2
  • 189. P\left(-t_{n-1;\alpha/2} \le \frac{\bar{x} - \mu}{s/\sqrt{n}} \le t_{n-1;\alpha/2}\right) = 1 - \alpha \ \Rightarrow\ P\left(\bar{x} - t_{n-1;\alpha/2}\frac{s}{\sqrt{n}} \le \mu \le \bar{x} + t_{n-1;\alpha/2}\frac{s}{\sqrt{n}}\right) = 1 - \alpha; \quad \text{the interval } \left[\bar{x} - t_{n-1;\alpha/2}\frac{s}{\sqrt{n}},\ \bar{x} + t_{n-1;\alpha/2}\frac{s}{\sqrt{n}}\right] \text{ covers } \mu \text{ with probability } 1 - \alpha
  • 190. P\left(\bar{x} - t_{n-1;\alpha/2}\frac{s}{\sqrt{n}} \le \mu \le \bar{x} + t_{n-1;\alpha/2}\frac{s}{\sqrt{n}}\right) = 1 - \alpha
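The resulting interval is exactly what base R's t.test reports. A minimal sketch (the simulated data and α = 0.05 are arbitrary choices):
set.seed(1)
x <- rnorm(20, mean = 5, sd = 2)
n <- length(x); alpha <- 0.05
xbar <- mean(x); s <- sd(x)
tq <- qt(1 - alpha / 2, df = n - 1)                    # t_{n-1; alpha/2}
c(xbar - tq * s / sqrt(n), xbar + tq * s / sqrt(n))    # manual 95% CI for mu
t.test(x, conf.level = 1 - alpha)$conf.int             # the same interval from t.test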
  • 191.
  • 192.
  • 193. \sigma = 1,\ \mu = 0; \quad H_0: \mu = \mu_0 = 0, \qquad H_1: \mu \ne \mu_0
  • 194. \sigma_{\bar{x}} = \frac{\sigma}{\sqrt{n}} = \frac{\sigma}{\sqrt{10}} \approx \frac{\sigma}{3.16}
  • 195. \text{rejection regions of probability } \alpha/2 \text{ in each tail under } H_0: \mu = \mu_0 = 0
  • 196.
  • 197.
  • 198.
  • 199.
  • 200.
  • 201.
  • 202. H1 : µ = 1
  • 203. H1 : µ = 0.5
  • 204. H_1: \mu = 3, \qquad H_0: \mu = \mu_0 = 0
  • 205.
  • 206. \text{effect size} := \mu - \mu_0
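Effect size, sample size, α and power are tied together, and base R's power.t.test shows the trade-off. A minimal sketch (assuming a one-sample t test with σ = 1 as in the slides; the specific numbers are illustrative only):
# power for n = 10 at the effect sizes used on the slides (delta = mu - mu_0, sd = 1)
sapply(c(0.5, 1, 3), function(d)
  power.t.test(n = 10, delta = d, sd = 1, sig.level = 0.05,
               type = "one.sample", alternative = "two.sided")$power)
# sample size needed to detect delta = 0.5 with 80% power
power.t.test(delta = 0.5, sd = 1, sig.level = 0.05, power = 0.8,
             type = "one.sample", alternative = "two.sided")$n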
  • 207.
  • 208.
  • 209.
  • 210.
  • 211.
  • 213.
  • 214.
  • 215.
  • 216.
  • 217.
  • 218.
  • 219.
  • 220.
  • 221. r = \frac{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})(y_i - \bar{y})}{\sqrt{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})^2}\sqrt{\frac{1}{n}\sum_{i=1}^{n}(y_i - \bar{y})^2}}
  • 222. r = \frac{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})(y_i - \bar{y})}{\sqrt{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})^2}\sqrt{\frac{1}{n}\sum_{i=1}^{n}(y_i - \bar{y})^2}}
  • 223. r = \frac{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})(y_i - \bar{y})}{\sqrt{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})^2}\sqrt{\frac{1}{n}\sum_{i=1}^{n}(y_i - \bar{y})^2}}
  • 224. r = \frac{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})(y_i - \bar{y})}{\sqrt{\frac{1}{n}\sum_{i=1}^{n}(x_i - \bar{x})^2}\sqrt{\frac{1}{n}\sum_{i=1}^{n}(y_i - \bar{y})^2}}
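The sample correlation r from the formula above matches base R's cor. A minimal sketch (bivariate normal data via MASS::mvrnorm with correlation 0.8 is an arbitrary example):
library(MASS)
set.seed(1)
xy <- mvrnorm(1000, mu = c(0, 0), Sigma = matrix(c(1, 0.8, 0.8, 1), 2))
x <- xy[, 1]; y <- xy[, 2]
r_manual <- mean((x - mean(x)) * (y - mean(y))) /
  (sqrt(mean((x - mean(x))^2)) * sqrt(mean((y - mean(y))^2)))
c(r_manual, cor(x, y))   # the two values agree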