Commit 37896fc8 authored by Muhammad Salman Shahid's avatar Muhammad Salman Shahid
Browse files

Fixed clash of time.time and datetime.time

parent ea3fcddb
......@@ -12,7 +12,10 @@ if __name__ == "__main__":
print(datetime.now().time())
train_1min, val_1min, test_1min = split_data(dataframe=AMPDs_dataframe, split_by_look_back=True)
# train_1min, val_1min, test_1min = split_data(dataframe=AMPDs_dataframe, split_by_look_back=True)
train_1min, val_1min, test_1min = split_data(dataframe=AMPDs_dataframe, validation_days=28, test_days=14,
split_by_look_back=True)
appliances = list(train_1min.columns.values[1:])
......
import time
import time as tm
from sklearn.tree import DecisionTreeRegressor
from .data_eng import *
......@@ -15,7 +15,7 @@ def prediction_using_decision_trees(train_dataframe, val_dataframe, test_datafra
:param sample_split:
:return:
"""
start = time.time()
start = tm.time()
prediction_with_min_loss = {}
......@@ -59,7 +59,7 @@ def prediction_using_decision_trees(train_dataframe, val_dataframe, test_datafra
prediction_with_min_loss[appliance] = list_classifiers[min_loss_index].predict(X_test)
print('Training time: ', (time.time() - start) / 60)
print('Training time: ', (tm.time() - start) / 60)
return min_val_loss, prediction_with_min_loss
......
from time import time
from keras.layers.core import Dense, Activation, Dropout
from keras.layers.recurrent import LSTM
from keras.models import Sequential
......@@ -6,15 +8,46 @@ from keras.models import load_model
from keras.optimizers import Adam
from keras.regularizers import l2
def build_fc_model(layers):
from .data_eng import *
def nn_model(train_dataframe, val_dataframe, test_dataframe, appliances, layers, sampling, ):
fc_model = Sequential()
for i in range(len(layers)-1):
fc_model.add( Dense(input_dim=layers[i], output_dim= layers[i+1]) )#, W_regularizer=l2(0.1)) )
fc_model.add( Dropout(0.5) )
for i in range(len(layers) - 1):
fc_model.add(Dense(input_dim=layers[i], output_dim=layers[i + 1])) # , W_regularizer=l2(0.1)) )
fc_model.add(Dropout(0.5))
if i < (len(layers) - 2):
fc_model.add( Activation('relu') )
fc_model.add(Activation('relu'))
fc_model.summary()
return fc_model
####################################################################################################
start = time()
prediction_with_min_loss = {}
min_val_loss = {}
for appliance in appliances:
print("Appliance: {}, Sampling: {}".format(appliance, sampling))
list_classifiers = []
losses = []
X_train = train_dataframe.loc[:, train_dataframe.columns[0]].values.reshape(-1, 1)
X_val = val_dataframe.loc[:, val_dataframe.columns[0]].values.reshape(-1, 1)
X_test = test_dataframe.loc[:, test_dataframe.columns[0]].values.reshape(-1, 1)
y_train = train_dataframe[appliance].values
y_val = val_dataframe[appliance].values
adam = Adam(lr=1e-5)
fc_model.compile(loss='mean_squared_error', optimizer=adam)
start = time()
checkpointer = ModelCheckpoint(filepath="./fc_refrig_h1_2.hdf5", verbose=0, save_best_only=True)
hist_fc = fc_model.fit(X_train, y_train,
batch_size=512, verbose=1, nb_epoch=200,
validation_split=0.33, callbacks=[checkpointer])
print('Finish training. Time: ', time() - start)
###################################################################################################
return fc_model
import time
import time as tm
from sklearn.ensemble import RandomForestRegressor
......@@ -16,7 +16,7 @@ def prediction_using_random_forest(train_dataframe, val_dataframe, test_datafram
:param sample_split:
:return:
"""
start = time.time()
start = tm.time()
prediction_with_min_loss = {}
......@@ -60,7 +60,7 @@ def prediction_using_random_forest(train_dataframe, val_dataframe, test_datafram
prediction_with_min_loss[appliance] = list_classifiers[min_loss_index].predict(X_test)
print('Training time: ', (time.time() - start) / 60)
print('Training time: ', (tm.time() - start) / 60)
return min_val_loss, prediction_with_min_loss
......
......@@ -13,18 +13,24 @@ if __name__ == "__main__":
print(datetime.now().time())
train_1min, val_1min, test_1min = split_data(dataframe=AMPDs_dataframe, split_by_look_back=True)
# train_1min, val_1min, test_1min = split_data(dataframe=AMPDs_dataframe, split_by_look_back=True)
train_1min, val_1min, test_1min = split_data(dataframe=AMPDs_dataframe, validation_days=28, test_days=14,
split_by_look_back=True)
appliances = list(train_1min.columns.values[1:])
min_samples_split = np.arange(2, 500, 10)
path_loss = "Random Forest/Losses"
min_val_loss_1min, prediction_with_min_loss_1min = prediction_using_random_forest(train_dataframe=train_1min,
val_dataframe=val_1min,
test_dataframe=test_1min,
appliances=appliances,
min_samples_split=min_samples_split,
sampling=sampling1)
sampling=sampling1,
path_loss=path_loss)
mse_losses_1min, mae_losses_1min = error_in_the_regression(prediction_with_min_loss=prediction_with_min_loss_1min,
test=test_1min, appliances=appliances)
......@@ -44,7 +50,8 @@ if __name__ == "__main__":
test_dataframe=test_10min,
appliances=appliances,
min_samples_split=min_samples_split,
sampling=sampling10)
sampling=sampling10,
path_loss=path_loss)
mse_losses_10min, mae_losses_10min = error_in_the_regression(
prediction_with_min_loss=prediction_with_min_loss_10min,
......@@ -65,7 +72,8 @@ if __name__ == "__main__":
test_dataframe=test_30min,
appliances=appliances,
min_samples_split=min_samples_split,
sampling=sampling30)
sampling=sampling30,
path_loss=path_loss)
mse_losses_30min, mae_losses_30min = error_in_the_regression(
prediction_with_min_loss=prediction_with_min_loss_30min,
......@@ -80,4 +88,5 @@ if __name__ == "__main__":
dict_30min = dicts_n_mins(test_dataframe=test_30min, prediction_dictionary=prediction_with_min_loss_30min,
appliances=appliances)
plot_subplots(dict_1min, dict_10min, dict_30min, appliances)
path = "Random Forest/Sampling comparison"
plot_subplots(dict_1min, dict_10min, dict_30min, appliances, path)
Markdown is supported
0% uploaded — or attach a file.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment