-
Notifications
You must be signed in to change notification settings - Fork 0
/
dummy_data.py
196 lines (161 loc) · 5.94 KB
/
dummy_data.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
import csv
import os
import json, time
from random import seed
from random import gauss
from random import random
from random import randint
from datetime import datetime
import pandas as pd
def file_path_info():
    """Build the json-files directory and the default JSON/CSV file paths.

    Returns:
        tuple: (dir_path, filename_json, filename_csv), all rooted at the
        current working directory under templates/static/json_files/.
    """
    base = os.getcwd() + "/templates/static/json_files/"
    json_file = base + "json_column_2.json"
    csv_file = base + "csv_column_2.csv"
    return base, json_file, csv_file
def create_data_point():
    """Generate 50 Gaussian and 50 uniform random samples plus their indices.

    Returns:
        tuple: (max_range, my_list_1, my_list_2, key_point) where
            max_range is the sample count (50),
            my_list_1 holds gauss(0, 1) samples,
            my_list_2 holds uniform [0, 1) samples,
            key_point is the index list [0, ..., max_range - 1].
    """
    # BUG FIX: random.seed() only accepts None/int/float/str/bytes/bytearray
    # since Python 3.9 (TypeError from 3.11). Seeding with the current
    # timestamp keeps the original "fresh seed per call" intent.
    seed(datetime.now().timestamp())
    max_range = 50
    # The original appended the loop variable `_` as data; make the index
    # list explicit instead.
    key_point = list(range(max_range))
    my_list_1 = [gauss(0, 1) for _ in range(max_range)]
    my_list_2 = [random() for _ in range(max_range)]
    return max_range, my_list_1, my_list_2, key_point
def create_int_data_point(n):
    """Return two lists of n random integers each, drawn from 0..11 inclusive."""
    # Draw the pairs in the same interleaved order as before (x, y, x, y, ...)
    # so the underlying RNG stream is consumed identically.
    pairs = [(randint(0, 11), randint(0, 11)) for _ in range(n)]
    xs = [p[0] for p in pairs]
    ys = [p[1] for p in pairs]
    return xs, ys
# NOTE: the following function is currently unused; kept for possible future use.
def check_if_string_in_file(file_name, string_to_search):
    """Return True if any line in the file contains the given string."""
    with open(file_name, 'r') as handle:
        # any() short-circuits on the first matching line, just like the
        # original early return did.
        return any(string_to_search in line for line in handle)
def make_file(filename):
    """Create filename if it does not exist (append mode touches the file).

    Existing content is left intact because of the 'a' mode.
    """
    # The with-statement already closes the handle; the original's explicit
    # file.close() inside the with block was redundant.
    with open(filename, 'a', newline=''):
        print("file_creation function is called")
def make_row(filename):
    """Append the CSV header row ("x", "y", "z") to filename."""
    header = [["x", "y", "z"]]
    # The with-statement closes the file; the original's extra file.close()
    # inside the with block was redundant.
    with open(filename, 'a', newline='') as file:
        csv.writer(file).writerows(header)
def fill_data(filename):
    """Append one CSV row per generated data point: index, gaussian, uniform.

    Rows come from create_data_point(); the file is opened once for all rows
    (the original reopened and re-closed it for every single row).
    """
    max_range, my_list_1, my_list_2, _key_point = create_data_point()
    with open(filename, 'a', newline='') as file:
        writer = csv.writer(file)
        for i in range(max_range):
            writer.writerow([i, my_list_1[i], my_list_2[i]])
def do_process():
    """(Re)build the dummy CSV from scratch: header row plus random data.

    The CSV lives at <cwd>/templates/static/json_files/csv_column_2.csv.
    Any existing file is removed first so the output always starts blank.
    """
    dir_path = os.getcwd() + "/templates/static/json_files/"
    filename = dir_path + "csv_column_2.csv"
    # Both branches of the original ran the same three steps; only the
    # removal of a pre-existing file is conditional.
    if os.path.isfile(filename):
        print("file already exists")
        os.remove(filename)
        print("file removed")
    else:
        print("file is not here")
    make_file(filename)
    make_row(filename)
    fill_data(filename)
def create_json():
    """Build chart-ready data structures from freshly generated random points.

    Returns:
        list: [new_list_1, new_list_2, heat_map_parent_list_xy] where
            new_list_1 is a list of {'x', 'y1', 'y2'} line-chart points,
            new_list_2 adds a second x axis as {'x1', 'x2', 'y1', 'y2'},
            heat_map_parent_list_xy is a list of {'i', 'q'} integer pairs
            intended for a heat map.
    """
    # Removed unused locals from the original: filename_json was computed but
    # never read, and heat_map_child_list (make_heat_map_matrix result) was
    # never used or returned. Dead commented-out code was dropped too.
    _max_range, list_1, list_2, key_point = create_data_point()
    key_point = [element * 3 for element in key_point]
    key_point_1 = [int(element * 2) for element in key_point]
    new_list_1 = [{'x': x, 'y1': y1, 'y2': y2}
                  for x, y1, y2 in zip(key_point, list_1, list_2)]
    new_list_2 = [{'x1': x1, 'x2': x2, 'y1': y1, 'y2': y2}
                  for x1, x2, y1, y2 in zip(key_point, key_point_1, list_1, list_2)]
    heat_map_parent_list_xy = [{'i': y1, 'q': y2}
                               for y1, y2 in zip(*create_int_data_point(10))]
    return [new_list_1, new_list_2, heat_map_parent_list_xy]
def make_heat_map_matrix(sample_list):
    """Count (i, q) occurrences on a 65x65 grid and flatten to heat-map cells.

    Args:
        sample_list: iterable of dicts with integer 'i' and 'q' keys; values
            must lie in 0..64, otherwise KeyError is raised (same as before).

    Returns:
        list[dict]: one {'x', 'y', 'heat'} dict per grid cell in x-major
            order, with x/y as floats and heat the occurrence count.
    """
    # The original encoded each cell as a string key 'i_x:q_y' and then
    # parsed the coordinates back out with rsplit; tuple keys avoid that
    # fragile round-trip while producing the identical result.
    counts = {(x, y): 0 for x in range(65) for y in range(65)}
    for item in sample_list:
        counts[(item['i'], item['q'])] += 1
    # dict insertion order preserves the x-major layout of the grid.
    return [{'x': float(x), 'y': float(y), 'heat': heat}
            for (x, y), heat in counts.items()]
def load_json_1():
    """Return freshly generated chart data (thin wrapper over create_json)."""
    return create_json()
def load_json():
    """Load and return <cwd>/templates/static/json_files/json_column_2.json."""
    filename_json = os.getcwd() + "/templates/static/json_files/json_column_2.json"
    # The with-statement closes the handle; the original opened the file and
    # never closed it.
    with open(filename_json) as handle:
        return json.load(handle)
# Goal of the following function: convert the CSV data to JSON. Note that the
# pandas round-trip changes the data types (values are re-keyed by row index).
def load_csv():
    """Convert the dummy CSV to JSON via pandas and return the parsed result.

    Writes <json_files_dir>/new_json.json as a side effect, then reads it
    back with the stdlib json module.
    """
    dir_path, _filename_json, filename_csv = file_path_info()
    new_json_path = dir_path + "new_json.json"
    df = pd.read_csv(filename_csv)
    df.to_json(new_json_path)
    # The with-statement closes the handle; the original opened the file and
    # never closed it.
    with open(new_json_path) as handle:
        return json.load(handle)
def load_table():
    """Load and return <cwd>/templates/static/json_files/table.json."""
    filename_json = os.getcwd() + "/templates/static/json_files/table.json"
    # The with-statement closes the handle; the original opened the file and
    # never closed it.
    with open(filename_json) as handle:
        return json.load(handle)
# if check_if_string_in_file(filename, 'unit'):
# print("file already here")
# fill_data()
# else:
# print("1st time here")
# make_row()
# fill_data()