//
// Author: Roman Kalivoda
//

using System;
using System.Collections.Generic;
using ServerApp.Connection.XMLProtocolHandler;
using ServerApp.Parser.Parsers;

namespace ServerApp.Predictor
{
    /// <summary>
    /// Implementation of the <c>IPredictionController</c> interface.
    /// </summary>
    class PredictionController : IPredictionController
    {
        /// <summary>
        /// A dictionary mapping building location keys to the index of the area (and predictor) they belong to.
        /// </summary>
        private Dictionary<string, int> buildingsToAreas;

        /// <summary>
        /// The trained predictors, one per area.
        /// </summary>
        private List<IPredictor> predictors;

        /// <summary>
        /// A reference to a data parser.
        /// </summary>
        private IDataParser dataParser;

        /// <summary>
        /// A feature extractor instance.
        /// </summary>
        private FeatureExtractor featureExtractor;

        /// <summary>
        /// Instantiates a new prediction controller.
        /// </summary>
        /// <param name="dataParser">A data parser used to get training data.</param>
        public PredictionController(IDataParser dataParser)
        {
            this.dataParser = dataParser;
            this.predictors = new List<IPredictor>();
            this.buildingsToAreas = new Dictionary<string, int>();
            this.featureExtractor = new FeatureExtractor(this.dataParser, buildingsToAreas);

            // map all available location keys to a single area served by one shared predictor
            // TODO Currently all locations use the same predictor. Try dividing locations into subareas with separate predictors.
            var locationKeys = TagInfo.buildings;
            foreach (string key in locationKeys)
            {
                buildingsToAreas.Add(key, 0);
            }
            IPredictor predictor = new NaiveBayesClassifier();
            predictors.Add(predictor);
        }

        /// <summary>
        /// Returns the location keys handled by this controller.
        /// </summary>
        public List<string> GetPredictors()
        {
            return new List<string>(buildingsToAreas.Keys);
        }

        public void Load(string locationKey = null, string path = null)
        {
            if (locationKey is null)
            {
                throw new NotImplementedException();
            }
            else
            {
                throw new NotImplementedException();
            }
        }

        public Response Predict(Request request)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Trains the predictors on data prepared by the feature extractor.
        /// </summary>
        /// <param name="locationKey">If null, all predictors are trained; otherwise only the predictor responsible for the given location.</param>
        public void Train(string locationKey = null)
        {
            if (locationKey is null)
            // train all predictors
            {
                // TODO A single predictor is used for all areas, so training is done only once now.
                for (int i = 0; i < this.predictors.Count; i++)
                {
                    // train on all available data
                    // TODO the train/test split is used just temporarily for demonstration; GetRange below assumes at least 594 prepared samples.
                    List<ModelInput> data = featureExtractor.PrepareTrainingInput(i, DateTime.MinValue, DateTime.MaxValue);
                    List<ModelInput> trainingData = data.GetRange(index: 0, count: 500);
                    List<ModelInput> testData = data.GetRange(index: 500, count: 94);
                    Console.WriteLine("Training predictor with {0} samples.", trainingData.Count);
                    this.predictors[i].Fit(trainingData);

                    Console.WriteLine("Evaluating predictor with {0} samples.", testData.Count);
                    this.predictors[i].Evaluate(testData);
                }
            }
            else
            // train specified predictor only
            {
                throw new NotImplementedException();
            }
        }
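
        // Illustrative sketch only (an assumption, not part of the current implementation):
        // the unimplemented per-location branch above could look up the predictor for a
        // location key via buildingsToAreas, along these lines:
        //
        //   int area = this.buildingsToAreas[locationKey];
        //   List<ModelInput> data = featureExtractor.PrepareTrainingInput(area, DateTime.MinValue, DateTime.MaxValue);
        //   this.predictors[area].Fit(data);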
    }
}
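
// Usage sketch (illustrative only; how the parser is obtained is left as a placeholder,
// and Predict/Load currently throw NotImplementedException):
//
//   IDataParser parser = /* obtain an IDataParser implementation */;
//   var controller = new PredictionController(parser);
//   controller.Train();                                  // train on all available data
//   List<string> locations = controller.GetPredictors(); // location keys the controller handles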