Skip to content
GitLab
Projects
Groups
Snippets
Help
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
I
IBD-Analysis
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Service Desk
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Operations
Operations
Incidents
Environments
Analytics
Analytics
CI / CD
Repository
Value Stream
Wiki
Wiki
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Harald RINGBAUER
IBD-Analysis
Commits
0679cab2
Commit
0679cab2
authored
May 04, 2016
by
Harald RINGBAUER
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Upload new file
parent
e1c4beab
Pipeline
#33
skipped
Changes
1
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
64 additions
and
0 deletions
+64
-0
mle_estimation.py
mle_estimation.py
+64
-0
No files found.
mle_estimation.py
0 → 100644
View file @
0679cab2
'''
Created on Nov 16, 2015
Class which does MLE estimation for perfect Data.
For real Binning see POPRES-Analysis
@author: Harald
'''
l0
=
0.05
# Minimum block length which is reported
import
numpy
as
np
import
math
from
statsmodels.base.model
import
GenericLikelihoodModel
from
scipy.special
import
kv
as
kv
def single_pair(l_vec, r, C, sigma):
    '''Log-likelihood contribution of a single pair of individuals.

    l_vec: non-empty sequence of shared-block lengths for this pair
    (assumes len(l_vec) > 0); r: pairwise distance; C, sigma: model
    parameters. Returns the summed log of the block-length density,
    which involves the modified Bessel function K_2.
    '''
    densities = []
    for block_len in l_vec:
        bessel_arg = np.sqrt(2 * block_len) * r / sigma
        prefactor = C * r ** 2 / (2 * block_len * sigma ** 2)
        densities.append(prefactor * kv(2, bessel_arg))
    return np.sum(np.log(densities))
def pairwise_ll(l, r, C, sigma):
    '''Full pairwise log-likelihood of the data.

    Parameters
    ----------
    l : sequence of length n; l[i] is either 0 (pair i shares no block)
        or a non-empty sequence of shared-block lengths.
    r : (n, 1) array of pairwise distances — the `[:, 0]` below relies
        on this column shape (TODO confirm against callers).
    C, sigma : model parameters (rate constant and dispersal).

    Returns
    -------
    (n,) float64 array of per-pair log-likelihoods, or zeros_like(l)
    when the parameters are out of their valid range.
    '''
    # Diagnostic output of the current optimizer iterate.
    print("C: %.5f" % C)
    print("Sigma: %.4f" % sigma)
    if C <= 0 or sigma <= 0:
        return np.zeros_like(l)  # If Parameters do not make sense.
    # Log-probability of sharing no block longer than l0 (vectorized in r).
    pr_noshare = -C * r / (np.sqrt(2 * l0) * sigma) * kv(1, np.sqrt(2 * l0) * r / sigma)
    # Contribution of the observed shared blocks; 0.0 where nothing is shared.
    f_share = np.array([single_pair(l_i, r_i, C, sigma) if l_i != 0 else 0.0
                        for l_i, r_i in zip(l, r)])
    res = pr_noshare[:, 0] + f_share
    # np.float was removed in NumPy 1.24; np.float64 is the equivalent dtype.
    return res.astype(np.float64)
def pairwise_ll01(l, r, C, sigma):
    '''Pairwise likelihood caring only about sharing a block or not
    (Poisson model for the number of shared blocks).

    Parameters as in pairwise_ll: l[i] is 0 or a sequence of shared-block
    lengths, r is an (n, 1) column of pairwise distances (TODO confirm
    shape against callers). Returns an (n,) float64 array of per-pair
    likelihoods (probabilities, not logged), or zeros_like(l) for
    out-of-range parameters.
    '''
    # Diagnostic output of the current optimizer iterate.
    print("C: %.5f" % C)
    print("Sigma: %.4f" % sigma)
    if C <= 0 or sigma <= 0:
        return np.zeros_like(l)  # If Parameters do not make sense.
    # Expected number of shared blocks above l0, vectorized over r.
    lambd = (C * r / (np.sqrt(2 * l0) * sigma) * kv(1, np.sqrt(2 * l0) * r / sigma))
    pr_noshare = np.exp(-lambd)  # Poisson probability of sharing nothing.
    # Number of shared blocks per pair (0 where nothing is shared).
    # (Renamed from `l` to avoid shadowing the parameter.)
    k = [len(l[i]) if l[i] != 0 else 0 for i in range(len(r))]
    # Remaining Poisson factor lambda^k / k!, which is 1 for k == 0.
    # NOTE(review): lambd[i] is a length-1 row, so these entries keep an
    # extra dimension exactly as the original code did — verify the
    # resulting product shape against callers.
    pr_share = np.array([(lambd[i] ** k[i]) / math.factorial(k[i]) if k[i] != 0 else 1
                         for i in range(len(r))])
    res = pr_noshare[:, 0] * pr_share  # Bring together the two terms
    # np.float was removed in NumPy 1.24; np.float64 is the equivalent dtype.
    return res.astype(np.float64)
class MLE_estimation(GenericLikelihoodModel):
    '''Maximum-likelihood estimation of (C, sigma) via statsmodels.

    endog holds the per-pair shared-block data (passed as `l` to
    pairwise_ll) and exog the pairwise distances (passed as `r`).
    '''

    def __init__(self, endog, exog, **kwds):
        super(MLE_estimation, self).__init__(endog, exog, **kwds)

    def nloglikeobs(self, params):
        '''Negative log-likelihood per observation; params = [C, sigma].'''
        C = params[0]
        sigma = params[1]
        # First is length of shared block, second is pairwise distance.
        p_ll = pairwise_ll(self.endog, self.exog, C, sigma)
        nll = -p_ll
        return nll

    def fit(self, start_params=None, maxiter=10000, maxfun=5000, **kwds):
        '''Fit the model, defaulting start_params to [C=0.02, sigma=2].'''
        # BUGFIX: `start_params == None` compares elementwise when an
        # ndarray is passed and then fails in boolean context; identity
        # check is the correct None test.
        if start_params is None:
            start_params = np.array([0.02, 2])
        return super(MLE_estimation, self).fit(start_params=start_params,
                                               maxiter=maxiter,
                                               maxfun=maxfun,
                                               **kwds)
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment