88from mpest .core .mixture_distribution import MixtureDistribution
99from mpest .core .problem import Problem , Result
1010from mpest .em .methods .abstract_steps import AExpectation , AMaximization
11- from mpest .exceptions import SampleError
12- from mpest .models import AModel , AModelDifferentiable
11+ from mpest .models import AModel , AModelDifferentiable , Uniform
1312from mpest .optimizers import AOptimizerJacobian , TOptimizer
1413from mpest .utils import ResultWithError
1514
@@ -31,16 +30,8 @@ def step(self, problem: Problem) -> EResult:
3130 samples = problem .samples
3231 mixture = problem .distributions
3332 p_xij = []
34- active_samples = []
3533 for x in samples :
36- p = np .array ([d .model .pdf (x , d .params ) for d in mixture ])
37- if np .any (p ):
38- p_xij .append (p )
39- active_samples .append (x )
40-
41- if not active_samples :
42- error = SampleError ("None of the elements in the sample is correct for this mixture" )
43- return ResultWithError (mixture , error )
34+ p_xij .append (np .array ([d .model .pdf (x , d .params ) for d in mixture ]))
4435
4536 # h[j, i] contains probability of X_i to be a part of distribution j
4637 m = len (p_xij )
@@ -56,7 +47,7 @@ def step(self, problem: Problem) -> EResult:
5647
5748 h [:, i ] = wp / swp
5849
59- return active_samples , h , problem
50+ return samples , h , problem
6051
6152
6253# class ML(AExpectation[EResult]):
@@ -109,8 +100,30 @@ def step(self, e_result: EResult) -> Result:
109100 for j , ch in enumerate (h [:]):
110101 d = mixture [j ]
111102
103+ if isinstance (d .model , Uniform ):
104+ threshold = 1e-2
105+ curr_a , curr_b = d .params
106+ relevant_indices = np .where (ch > threshold )[0 ]
107+ if len (relevant_indices ) == 0 :
108+ new_params = d .params
109+ else :
110+ relevant_samples = np .array (samples )[relevant_indices ]
111+
112+ new_a = np .min (relevant_samples )
113+ new_b = np .max (relevant_samples )
114+
115+ new_params = np .array ([new_a , new_b ])
116+
117+ new_distributions .append (Distribution (d .model , new_params ))
118+ continue
119+
def log_likelihood(params, ch, model: AModel):
    """Weighted negative log-likelihood objective for one mixture component.

    Samples that fall outside the model's support yield ``lpdf == -inf``,
    which would make the objective undefined; each such sample instead
    contributes a large finite penalty scaled by its responsibility.

    Args:
        params: parameter vector forwarded to ``model.lpdf``.
        ch: per-sample responsibilities (weights) for this component.
        model: mixture-component model providing ``lpdf``.

    Returns:
        Scalar objective value to be minimized by the optimizer.

    Note: reads ``samples`` from the enclosing M-step scope.
    """
    lpdf_vals = np.array([model.lpdf(x, params) for x in samples])
    ch_arr = np.asarray(ch)
    penalty = 1e20  # large finite stand-in for an infinite penalty

    # Vectorized replacement of the per-element Python loop.  Zero out the
    # -inf entries before multiplying so 0 * -inf never produces NaN (and
    # no spurious runtime warnings), then swap in the penalty term for the
    # unsupported samples via np.isneginf (robust vs. `y != -np.inf`).
    unsupported = np.isneginf(lpdf_vals)
    safe_lpdf = np.where(unsupported, 0.0, lpdf_vals)
    terms = np.where(unsupported, penalty * ch_arr, -ch_arr * safe_lpdf)

    return np.sum(terms)
114127
115128 def jacobian (params , ch , model : AModelDifferentiable ):
116129 return - np .sum (
0 commit comments