Projekt

Obecné

Profil

Stáhnout (2.81 KB) Statistiky
| Větev: | Revize:
1
import os
2

    
3
CRAWLED_DATA_PATH = "../CrawledData/"
4
PROCESSED_DATA_PATH = "../ProcessedData/"
5
CRAWLER_LOGS_PATH = "../CrawlerLogs/"
6
CRAWLER_PROGRAM_PATH = "../DatasetCrawler"
7
PROCESSOR_PROGRAM_PATH = "../DatasetProcessing"
8
CONFIG_FILES_PATH = "../DatasetConfigs"
9

    
10

    
11
def create_default_config_file(dataset_name):
    """Write a skeleton YAML config for *dataset_name* into CONFIG_FILES_PATH.

    The file consists of commented (Czech) placeholder entries that the
    user must fill in (url, regex, update-period, devices) before the
    dataset can be crawled.
    """
    template = [
        "# jmeno datasetu, pod kterym bude zobrazen v aplikaci\n",
        "dataset-name: " + dataset_name + "\n",
        "# root slozka, ktera obsahuje odkazy na dataset\n",
        "url: ZDE VLOZTE URL/\n",
        "# volitelny parameter, ktery specifikuje vzor jmrna datasetu, ktera se budou stahovat\n",
        "regex: ZDE VLOZTE REGEX\n",
        "# volitelny parametr, ktery udava jak casto se budou hledat nove datasety, pokud prazdne, "
        "tak defaultni hodnota (dny)\n",
        "update-period: ZDE VLOZTE HODNOTU\n",
        "# pozice jednotlivych zarizeni, ktera jsou v datasetu\n",
        "devices:\n",
    ]
    with open(CONFIG_FILES_PATH + "/" + dataset_name + ".yaml", "w") as config_file:
        config_file.writelines(template)
27
def create_default_processor(dataset_name):
    """Create a stub processor module <dataset_name>Processor.py.

    The generated module defines process_file(filename) whose body only
    prints a reminder until the user supplies a real implementation.
    """
    with open(PROCESSOR_PROGRAM_PATH + "/" + dataset_name + "Processor.py", "w") as file:
        file.write("def process_file(filename):\n")
        # Fix: "You must implements" -> "You must implement" (grammar in
        # the user-facing reminder emitted by the generated stub).
        file.write("    print(\"You must implement process_file method first!\")\n")
33
def create_default_crawler(dataset_name):
    """Create a stub crawler module <dataset_name>Crawler.py.

    The generated module defines crawl(config) whose body only prints a
    reminder until the user supplies a real implementation.
    """
    with open(CRAWLER_PROGRAM_PATH + "/" + dataset_name + "Crawler.py", "w") as file:
        file.write("def crawl(config):\n")
        # Fix: "You must implements Crawl" -> "You must implement crawl"
        # (grammar, and the name now matches the generated function).
        file.write("    print(\"You must implement crawl method first!\")\n")
40
def create_ignore_file(path, text):
    """Create (or truncate) <path>/ignore.txt.

    If *text* is not None it is written followed by a newline; otherwise
    the file is left empty.
    """
    content = "" if text is None else text + "\n"
    with open(path + "/ignore.txt", "w") as ignore_file:
        ignore_file.write(content)
47
def prepare_dataset_structure(dataset_name):
    """Scaffold the on-disk layout for a new dataset named *dataset_name*.

    Creates the crawled-data, processed-data and crawler-log folders
    (each with an ignore.txt) and then generates the default crawler,
    processor and YAML config stubs. Directory-creation failures are
    reported and skipped rather than raised, so re-running for an
    existing dataset only prints warnings.
    """
    # Fixes vs. original: removed unused local `jump_folder`; unified the
    # three copy-pasted try blocks into one helper; made exception
    # handling consistent (the original caught `os.error` with a printed
    # exception in one branch and bare `OSError` in the others —
    # `os.error` is just an alias of `OSError`).

    def _make_dataset_dir(base_path, ignore_text):
        # Best-effort mkdir + ignore.txt; report (don't raise) on failure
        # so the remaining folders are still attempted.
        path = base_path + dataset_name
        try:
            os.mkdir(path)
            create_ignore_file(path, ignore_text)
        except OSError as e:
            print(e)
            print("Creation of the directory %s failed" % path)

    _make_dataset_dir(CRAWLED_DATA_PATH, "ignore.txt")
    _make_dataset_dir(PROCESSED_DATA_PATH, "ignore.txt")
    _make_dataset_dir(CRAWLER_LOGS_PATH, None)

    create_default_crawler(dataset_name)
    create_default_processor(dataset_name)
    create_default_config_file(dataset_name)
80
if __name__ == "__main__":
    # Guarded script entry: scaffold a dataset named "WIFI" when run
    # directly, without triggering the side effect on import.
    prepare_dataset_structure("WIFI")
    (1-1/1)