Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
// Build a mixture of two 6-dimensional Gaussian components to serve as the
// per-state emission density template for every class model.
var h = new MultivariateNormalDistribution[2];
for (int i = 0; i < h.Length; i++)
{
    // 6 = dimensionality of each observation vector — TODO confirm against the feature extractor.
    h[i] = new MultivariateNormalDistribution(6);
}

// One hidden Markov model per class, each with a left-to-right (Forward)
// topology of `states` states and mixture-of-Gaussians emissions.
var Hmm = new HiddenMarkovClassifier<MultivariateMixture<MultivariateNormalDistribution>>(classes.Count,
    new Forward(states), new MultivariateMixture<MultivariateNormalDistribution>(h), classes.ToArray());

// NOTE(review): switching the topology from Forward to Ergodic was tried and
// performance was nearly the same. Ergodic models are commonly used when a
// single (large) observation sequence trains the model — e.g. when sequences
// have no well-defined start/end points and can be arbitrarily long.

// Create the learning algorithm for the ensemble classifier. Each class model
// is trained independently with Baum-Welch until either the log-likelihood
// change falls below `tolerance` or `iterations` passes are exhausted.
var teacher = new HiddenMarkovClassifierLearning<MultivariateMixture<MultivariateNormalDistribution>>(Hmm,
    i => new BaumWelchLearning<MultivariateMixture<MultivariateNormalDistribution>>(Hmm.Models[i])
    {
        Tolerance = tolerance,
        Iterations = iterations,
        FittingOptions = new MixtureOptions()
        {
            InnerOptions = new NormalOptions()
            {
                // Full covariance matrices; set to true for diagonal-only
                // (cheaper, but ignores feature correlations).
                Diagonal = false
            },
            // Fit the mixture in log-space for numerical stability.
            Logarithm = true,
            // Convergence threshold for the inner mixture (EM) fitting step.
            Threshold = 1e-5
        }
    }
);
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement