from Utilities import FolderProcessor, ConfigureFunctions
from Utilities.Database import DatabaseLoader

# Path to crawled data
CRAWLED_DATA_PATH = "CrawledData/"
# Path to processed data
PROCESSED_DATA_PATH = "ProcessedData/"
# Path to dataset crawler implementations
CRAWLER_LIB_PATH = "DatasetCrawler."
# Path to dataset processor implementations
PROCESSOR_LIB_PATH = "DatasetProcessing."
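
# For orientation, the constants above imply the following on-disk and module
# layout (derived from how the paths are used later in this file):
#
#   CrawledData/<dataset-name>/              raw files produced by the crawler
#   CrawledData/<dataset-name>/ignore.txt    crawled files already processed
#   ProcessedData/<dataset-name>/            output of the dataset processor
#   ProcessedData/<dataset-name>/ignore.txt  processed files already loaded to the database
#   DatasetCrawler/<dataset-name>Crawler.py      must expose crawl(config)
#   DatasetProcessing/<dataset-name>Processor.py must expose process_file(path)

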
def crawl_data(config):
    """
    Imports the dataset crawler from DatasetCrawler/<dataset_name>Crawler.py
    and runs it.

    Args:
        config: loaded configuration file of dataset
    """
    dataset_name = config["dataset-name"]

    # Dynamically import the dataset-specific crawler module and run its crawl() function
    crawl_func = __import__(CRAWLER_LIB_PATH + dataset_name + "Crawler", globals(), locals(), ['crawl']).crawl
    crawl_func(config)

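# For reference, crawl_data() expects every dataset to ship a module
# DatasetCrawler/<dataset_name>Crawler.py that exposes a crawl(config) function.
# The commented sketch below only illustrates that contract; the download logic
# and the "url" configuration key are assumptions, not part of this project.
#
#   # DatasetCrawler/ExampleCrawler.py -- hypothetical dataset "Example"
#   import os
#   import urllib.request
#
#   def crawl(config):
#       # Store the raw download under CrawledData/<dataset-name>/
#       target_dir = "CrawledData/" + config["dataset-name"] + "/"
#       os.makedirs(target_dir, exist_ok=True)
#       source_url = config["url"]  # assumed key, real configurations may differ
#       urllib.request.urlretrieve(source_url, target_dir + "example_data.csv")

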
def process_data(dataset_name):
    """
    Goes through every unprocessed file (i.e. every file not listed in
    CrawledData/<dataset_name>/ignore.txt),
    imports the dataset processor from DatasetProcessing/<dataset_name>Processor.py,
    runs the processor on every such file
    and updates ignore.txt after each successful processing.

    Args:
        dataset_name: name of dataset that has existing configuration file
    """
    dataset_path = dataset_name + '/'

    # Dynamically import the dataset-specific processor and its process_file() function
    process_file_func = __import__(PROCESSOR_LIB_PATH + dataset_name + "Processor", globals(), locals(),
                                   ['process_file']).process_file

    not_processed_files = FolderProcessor.list_of_all_files(CRAWLED_DATA_PATH + dataset_path)

    for not_processed_file in not_processed_files:
        process_file_func(CRAWLED_DATA_PATH + dataset_path + not_processed_file)
        FolderProcessor.update_ignore_set(CRAWLED_DATA_PATH + dataset_path, not_processed_file)

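# Similarly, process_data() relies on DatasetProcessing/<dataset_name>Processor.py
# exposing a process_file(path) function that receives the full path of one
# crawled file. The commented sketch below only illustrates that contract; the
# pass-through "processing" and the output location are assumptions, since the
# real processors are dataset specific.
#
#   # DatasetProcessing/ExampleProcessor.py -- hypothetical dataset "Example"
#   def process_file(path):
#       # Read one crawled file and write its cleaned form under ProcessedData/
#       with open(path, "r", encoding="utf-8") as crawled_file:
#           raw_lines = crawled_file.readlines()
#       processed_path = path.replace("CrawledData", "ProcessedData")
#       with open(processed_path, "w", encoding="utf-8") as processed_file:
#           processed_file.writelines(raw_lines)  # a real processor transforms the data here

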
def validate_process_data(config):
    """
    Goes through the newly processed data and checks their status.

    Args:
        config: loaded configuration file of dataset

    Returns:
        True if the data were processed correctly,
        False if the format is wrong or new unknown devices were found.
    """
    processed_devices_set = FolderProcessor.get_devices_set(PROCESSED_DATA_PATH + config["dataset-name"] + '/')
    unknown_devices_set = FolderProcessor.get_unknown_devices_set(config, processed_devices_set)
    unknown_devices_size = len(unknown_devices_set)

    if unknown_devices_size != 0:
        print("There are " + str(unknown_devices_size) + " unknown devices")
        ConfigureFunctions.update_configuration(config["dataset-name"], unknown_devices_set)
        return False

    return True


def load_data_to_database(config):
    """
    Goes through every file that has not been loaded yet (i.e. is not listed in
    ProcessedData/<dataset_name>/ignore.txt),
    loads the data, appends coordinates from the configuration
    and exports everything into the database.
    After each successful export, updates ignore.txt.

    Args:
        config: loaded configuration file of dataset
    """
    dataset_name = config["dataset-name"]
    dataset_path = dataset_name + '/'

    # Get all files from the dataset that have not been loaded yet
    not_loaded_files = FolderProcessor.list_of_all_files(PROCESSED_DATA_PATH + dataset_path)

    # Load every file
    for not_loaded_file in not_loaded_files:
        # Load processed data from the file
        processed_data = DatabaseLoader.get_data_from_file(not_loaded_file, config)
        # Export the processed data into the database
        DatabaseLoader.load_data_to_database(dataset_name, processed_data)
        FolderProcessor.update_ignore_set(PROCESSED_DATA_PATH + dataset_path, not_loaded_file)


def run_full_pipeline(dataset_name):
    """
    Loads the config file and starts the full pipeline:
    - crawl data
    - process data
    - load data to database

    Args:
        dataset_name: name of dataset that has existing configuration file
    """
    config = ConfigureFunctions.load_configuration(dataset_name)
    crawl_data(config)
    process_data(config["dataset-name"])

    validation_test = validate_process_data(config)

    if validation_test:
        load_data_to_database(config)
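

# A minimal, hypothetical way to invoke the pipeline; the dataset name "Example"
# is a placeholder and this __main__ guard is not part of the original script,
# it only illustrates how run_full_pipeline() is meant to be called.
if __name__ == "__main__":
    run_full_pipeline("Example")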