cesarsouza

Mixture HMM example

Jul 9th, 2011
C#

using Accord.Statistics.Distributions.Univariate;
using Accord.Statistics.Models.Markov;
using Accord.Statistics.Models.Markov.Learning;
using Accord.Statistics.Models.Markov.Topology;

using Multivariate = Accord.Statistics.Distributions.Multivariate;

namespace Test
{

//...
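// (the enclosing class and method declarations are omitted in this snippet;
//  the statements below are assumed to run inside a method body)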

    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
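    // Each inner double[] is one observation vector (a single value per time
    // step here), and each double[][] is one observation sequence.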
    double[][][] sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first  sequence with label = 0
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 2 },
            new double[] { 3 },
            new double[] { 4 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 4 },
            new double[] { 3 },
            new double[] { 2 },
            new double[] { 1 },
            new double[] { 0 },
        }
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };


    // Create a mixture of two 1-dimensional normal distributions (by default,
    // initialized with zero mean and unit covariance matrices).
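    // (the constructor argument gives the dimensionality of each component; here, 1)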
    var density = new Multivariate.Mixture<Multivariate.NormalDistribution>(
        new Multivariate.NormalDistribution(1),
        new Multivariate.NormalDistribution(1));

    // Create a sequence classifier containing 2 hidden Markov models, each with
    // 2 states, and an underlying multivariate mixture of Normal distributions
    // as the emission density.
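    // (an Ergodic topology allows transitions between every pair of states)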
    var classifier = new ContinuousSequenceClassifier(2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new ContinuousSequenceClassifierLearning(classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new ContinuousBaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
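            // (zero places no upper limit on the number of iterations; training
            //  stops on the tolerance criterion alone)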
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);
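    // (the returned log-likelihood indicates how well the trained models fit
    //  the training sequences)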


    // Calculate the probability that the given
    // sequences originated from the model
    double likelihood1, likelihood2;

    // Try to classify the 1st sequence (output should be 0)
    int c1 = classifier.Compute(sequences[0], out likelihood1);

    // Try to classify the 2nd sequence (output should be 1)
    int c2 = classifier.Compute(sequences[1], out likelihood2);
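
    // A minimal usage sketch: write out the predicted class labels and their
    // likelihoods (the System.Console calls below are illustrative and were
    // not part of the original snippet).
    System.Console.WriteLine("Sequence 1 -> class {0} (likelihood {1})", c1, likelihood1);
    System.Console.WriteLine("Sequence 2 -> class {0} (likelihood {1})", c2, likelihood2);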

    // ...

}