-
Notifications
You must be signed in to change notification settings - Fork 0
/
dataframe_saver.py
executable file
·103 lines (82 loc) · 4.19 KB
/
dataframe_saver.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
# --------------------------------------------------------------------------- #
# Importing section
# --------------------------------------------------------------------------- #
import os
import sys
import argparse
import logging
import json
from influxdb import InfluxDBClient
from classes.inputs_gatherer import InputsGatherer
from classes.artificial_features import ArtificialFeatures
# --------------------------------------------------------------------------- #
# Functions
# -----------------------------------------------------------------------------#
# --------------------------------------------------------------------------- #
# Main
# --------------------------------------------------------------------------- #
if __name__ == "__main__":
    # ----------------------------------------------------------------------- #
    # Command-line arguments
    # ----------------------------------------------------------------------- #
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-c", help="configuration file")
    arg_parser.add_argument("-t", help="type (MOR | EVE)")
    arg_parser.add_argument("-l", help="log file (optional, if empty log redirected on stdout)")
    args = arg_parser.parse_args()

    # Load the main parameters. Guard against a missing -c flag (args.c is
    # None in that case, and os.path.isfile(None) would raise TypeError).
    config_file = args.c
    if config_file is None or os.path.isfile(config_file) is False:
        print('\nATTENTION! Unable to open configuration file %s\n' % config_file)
        sys.exit(1)

    # Use context managers so the file handles are closed deterministically
    with open(config_file) as f:
        cfg = json.load(f)

    # Load the connections parameters and update the config dict with the related values
    with open(cfg['connectionsFile']) as f:
        cfg.update(json.load(f))

    # Define the forecast type (MOR | EVE, as passed via -t)
    forecast_type = args.t

    # ----------------------------------------------------------------------- #
    # Set logging object (stdout when no -l file was given)
    # ----------------------------------------------------------------------- #
    log_file = args.l if args.l else None
    logger = logging.getLogger()
    logging.basicConfig(format='%(asctime)-15s::%(levelname)s::%(funcName)s::%(message)s', level=logging.INFO,
                        filename=log_file)

    # ----------------------------------------------------------------------- #
    # Starting program
    # ----------------------------------------------------------------------- #
    logger.info("Starting program")

    # ----------------------------------------------------------------------- #
    # InfluxDB connection (exit code 3 on failure, as the callers expect)
    # ----------------------------------------------------------------------- #
    logger.info('Connection to InfluxDb server on socket [%s:%s]' % (cfg['influxDB']['host'], cfg['influxDB']['port']))
    try:
        influx_client = InfluxDBClient(host=cfg['influxDB']['host'], port=cfg['influxDB']['port'],
                                       password=cfg['influxDB']['password'], username=cfg['influxDB']['user'],
                                       database=cfg['influxDB']['database'], ssl=cfg['influxDB']['ssl'])
    except Exception as e:
        logger.error('EXCEPTION: %s' % str(e))
        sys.exit(3)
    logger.info('Connection successful')

    # Create the artificial features instance
    artificial_features = ArtificialFeatures(influxdb_client=influx_client, forecast_type=forecast_type, cfg=cfg,
                                             logger=logger)
    # Create the inputs gatherer instance
    inputs_gatherer = InputsGatherer(influxdb_client=influx_client, forecast_type=forecast_type, cfg=cfg, logger=logger,
                                     artificial_features=artificial_features)

    built_datasets = []
    read_datasets = []

    # Create all the possible signals for a given region
    # inputs_gatherer.generate_all_signals()

    # Build one dataset per signals file listed in the configuration
    for signal_file in cfg['datasetSettings']['signalsFiles']:
        built_datasets.append(inputs_gatherer.build_dataset(signals_file=signal_file))

    # Read datasets from provided csv files
    # for csv_file in cfg['datasetSettings']['csvFiles']:
    #     tmp_df = inputs_gatherer.read_dataset(csv_file=csv_file)
    #     read_datasets.append(tmp_df)

    logger.info("Ending program")