
Commit c83cfce1 authored by Muhammad Salman Shahid

Modified the scripts for the file naming

parent 26426f9c
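
In summary, this commit stops hard-coding the pickle filenames ("Decision Trees/Models/decision_trees_..." and "Random Forest/Models/random_forest_...") and instead derives a model_name from the path argument that replaces path_loss throughout. A minimal sketch of the naming rule implied by the diff below, assuming path is a two-word folder name such as "Decision Trees"; the helper function and the example values are illustrative, not part of the repository:

# Illustrative reconstruction of the file-naming rule introduced in this commit.
# Assumes `path` is a two-word folder name, e.g. "Decision Trees" or "Random Forest".
def build_model_name(path, appliance, sampling, validation_days, test_days):
    return "{}_{}_{}_{}_v{}_t{}".format(path.split(" ")[0].lower(),
                                        path.split(" ")[1].lower(),
                                        appliance, sampling,
                                        validation_days, test_days)

# Example with placeholder values: build_model_name("Decision Trees", "fridge", "1 minute", 30, 30)
# returns "decision_trees_fridge_1 minute_v30_t30", and the model is dumped to
# "Decision Trees/Models/decision_trees_fridge_1 minute_v30_t30.pkl".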
@@ -8,7 +8,6 @@ from methods import split_data, prediction_using_decision_trees, error_in_the_re
if __name__ == "__main__":
sampling1 = "1 minute"
print("Disaggreagtion on {}".format(sampling1))
# train_1min, val_1min, test_1min = split_data(dataframe=AMPDs_dataframe)
print(datetime.now().time())
@@ -24,7 +23,7 @@ if __name__ == "__main__":
min_samples_split = np.arange(2, 500, 10)
path_loss = "Decision Trees/Losses"
path = "Decision Trees"
min_val_loss_1min, prediction_with_min_loss_1min = prediction_using_decision_trees(train_dataframe=train_1min,
val_dataframe=val_1min,
@@ -32,7 +31,7 @@ if __name__ == "__main__":
appliances=appliances,
min_samples_split=min_samples_split,
sampling=sampling1,
- path_loss=path_loss,
+ path=path,
validation_days=validation_days,
test_days=test_days)
@@ -55,7 +54,7 @@ if __name__ == "__main__":
appliances=appliances,
min_samples_split=min_samples_split,
sampling=sampling1,
- path_loss=path_loss,
+ path=path,
validation_days=validation_days,
test_days=test_days)
mse_losses_10min, mae_losses_10min = error_in_the_regression(
@@ -77,7 +76,7 @@ if __name__ == "__main__":
appliances=appliances,
min_samples_split=min_samples_split,
sampling=sampling1,
- path_loss=path_loss,
+ path=path,
validation_days=validation_days,
test_days=test_days)
@@ -92,5 +91,4 @@ if __name__ == "__main__":
dict_30min = dicts_n_mins(test_dataframe=test_30min, prediction_dictionary=prediction_with_min_loss_30min,
appliances=appliances)
path = "Decision Trees/Sampling comparison"
- plot_subplots(dict_1min, dict_10min, dict_30min, appliances, path)
+ plot_subplots(dict_1min, dict_10min, dict_30min, appliances, path, validation_days, test_days)
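
Since the call sites above now pass the base folder (path = "Decision Trees") rather than a dedicated losses folder, the routines write into subfolders of that base path ("Models" in the new code, "Losses" in the code being replaced, and "Sampling comparison" for the plots). A minimal setup sketch, assuming those folders are not created elsewhere in the repository:

import os

# Hypothetical setup step: create the output folders referenced in this commit.
# The subfolder names are taken from the paths visible in the diff.
for folder in ("Decision Trees/Models", "Decision Trees/Losses",
               "Decision Trees/Sampling comparison", "Random Forest/Models"):
    os.makedirs(folder, exist_ok=True)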
import os
import time as tm
from sklearn.tree import DecisionTreeRegressor
@@ -7,15 +6,19 @@ import joblib
from .data_eng import *
# using decision tree model on other appliances
def prediction_using_decision_trees(train_dataframe, val_dataframe, test_dataframe, appliances, min_samples_split,
- sampling, path_loss, validation_days, test_days):
+ sampling, path, validation_days, test_days):
"""
:param train_dataframe:
:param val_dataframe:
:param test_dataframe:
:param sample_split:
:param appliances:
:param min_samples_split:
:param sampling:
:param path:
:param validation_days:
:param test_days:
:return:
"""
start = tm.time()
@@ -40,8 +43,6 @@ def prediction_using_decision_trees(train_dataframe, val_dataframe, test_datafra
y_val = val_dataframe[appliance].values
for split in min_samples_split:
# print("Appliance: {}, Split: {}".format(appliance, split))
classifier = DecisionTreeRegressor(min_samples_split=split)
classifier.fit(X_train, y_train)
@@ -54,7 +55,7 @@ def prediction_using_decision_trees(train_dataframe, val_dataframe, test_datafra
min_loss_index = np.argmin(losses)
- plot_losses(losses, min_samples_split, sampling, appliance, path_loss)
+ plot_losses(losses, min_samples_split, sampling, appliance, path, validation_days, test_days)
print("Minimum mean square error loss for {} is {}".format(appliance, losses[min_loss_index]))
@@ -62,37 +63,14 @@ def prediction_using_decision_trees(train_dataframe, val_dataframe, test_datafra
prediction_with_min_loss[appliance] = list_classifiers[min_loss_index].predict(X_test)
model_name = "{}_{}_{}_{}_v{}_t{}".format(path.split(" ")[0].lower(),
path.split(" ")[1].lower(), appliance, sampling,
validation_days, test_days)
# Save the model as a pickle in a file
joblib.dump(list_classifiers[min_loss_index],
"Decision Trees/Models/decision_trees_{}_{}_v{}_t{}.pkl".format(appliance, sampling,
validation_days, test_days))
"{}/Models/{}.pkl".format(path, model_name))
print('Training time: ', (tm.time() - start) / 60)
return min_val_loss, prediction_with_min_loss
def error_in_the_regression(prediction_with_min_loss, test, appliances):
"""
:param prediction_with_min_loss:
:param test:
:param appliances:
:return:
"""
mse_losses = {}
mae_losses = {}
for appliance in appliances:
mean_test_value = np.mean(test[appliance].values)
mse_losses[appliance] = mse_loss(prediction_with_min_loss[appliance], test[appliance].values)
mae_losses[appliance] = mae_loss(prediction_with_min_loss[appliance], test[appliance].values)
print('Appliance: {0}::::::: mean = {1:.2f} , mse = {2:.2f} - mae = {3:.2f}'.format(appliance, mean_test_value,
mse_losses[appliance],
mae_losses[appliance]))
return mse_losses, mae_losses
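
With the naming above, a persisted estimator can be reloaded straight from the "{path}/Models" folder. A short usage sketch, assuming such a pickle exists; the appliance name, sampling string, and day counts are placeholders:

import joblib

# Hypothetical reload of a model saved by prediction_using_decision_trees.
path = "Decision Trees"
model_name = "decision_trees_fridge_1 minute_v30_t30"  # placeholder values
regressor = joblib.load("{}/Models/{}.pkl".format(path, model_name))
# predictions = regressor.predict(X_test)  # X_test: the aggregate-power feature matrix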
@@ -9,7 +9,7 @@ from .data_eng import *
def prediction_using_neural_network(train_dataframe, val_dataframe, test_dataframe, appliances, layers, sampling,
- model_path):
+ model_path, epochs):
model = Sequential()
for i in range(len(layers) - 1):
model.add(Dense(input_dim=layers[i], output_dim=layers[i + 1])) # , W_regularizer=l2(0.1)) )
@@ -46,8 +46,8 @@ def prediction_using_neural_network(train_dataframe, val_dataframe, test_datafra
start = time()
# checkpointer = ModelCheckpoint(filepath="{}/{}.hdf5".format(model_path, appliances), monitor='val_loss',
# verbose=0, save_best_only=True, save_weights_only=True, mode='max')
- model_fitting = model.fit(X_train, y_train, batch_size=512, verbose=1, nb_epoch=5, validation_split=0.33,
- callbacks=[ModelCheckpoint(filepath="{}/{}.hdf5".format(model_path, appliances),
+ model_fitting = model.fit(X_train, y_train, batch_size=512, verbose=1, nb_epoch=epochs, validation_split=0.33,
+ callbacks=[ModelCheckpoint(filepath="{}/{}.hdf5".format(model_path, appliance),
monitor='val_loss', verbose=0, save_best_only=True,
save_weights_only=True, mode='max')])
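
In the network trainer, the checkpoint path now formats the individual appliance rather than the whole appliances list, and the epoch count is passed in as epochs instead of being fixed at nb_epoch=5. One caveat: monitor='val_loss' together with mode='max' keeps the weights with the highest validation loss; mode='min' (or the default 'auto') is presumably the intent. A minimal sketch of the per-appliance callback under that assumption (the helper is illustrative, not in the repository):

from keras.callbacks import ModelCheckpoint

# Illustrative per-appliance checkpoint factory; mode='min' is assumed here
# because the monitored quantity is a loss (the committed code passes 'max').
def make_checkpoint(model_path, appliance):
    return ModelCheckpoint(filepath="{}/{}.hdf5".format(model_path, appliance),
                           monitor='val_loss', verbose=0, save_best_only=True,
                           save_weights_only=True, mode='min')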
@@ -9,15 +9,7 @@ import joblib
# using decision tree model on other appliances
def prediction_using_random_forest(train_dataframe, val_dataframe, test_dataframe, appliances, min_samples_split,
- sampling, path_loss, validation_days, test_days):
- """
- :param train_dataframe:
- :param val_dataframe:
- :param test_dataframe:
- :param sample_split:
- :return:
- """
+ sampling, path, validation_days, test_days):
start = tm.time()
prediction_with_min_loss = {}
@@ -54,45 +46,22 @@ def prediction_using_random_forest(train_dataframe, val_dataframe, test_datafram
min_loss_index = np.argmin(losses)
- plot_losses(losses, min_samples_split, sampling, appliance, path_loss)
+ plot_losses(losses, min_samples_split, sampling, appliance, path, validation_days, test_days)
print("Minimum mean square error loss for {} is {}".format(appliance, losses[min_loss_index]))
min_val_loss[appliance] = losses[min_loss_index]
prediction_with_min_loss[appliance] = list_classifiers[min_loss_index].predict(X_test)
+ model_name = "{}_{}_{}_{}_v{}_t{}".format(path.split(" ")[0].lower(),
+ path.split(" ")[1].lower(), appliance, sampling,
+ validation_days, test_days)
# Save the model as a pickle in a file
joblib.dump(list_classifiers[min_loss_index],
"Random Forest/Models/random_forest_{}_{}_v{}_t{}.pkl".format(appliance, sampling, validation_days,
test_days))
prediction_with_min_loss[appliance] = list_classifiers[min_loss_index].predict(X_test)
"{}/Models/{}.pkl".format(path, model_name))
print('Training time: ', (tm.time() - start) / 60)
return min_val_loss, prediction_with_min_loss
def error_in_the_regression(prediction_with_min_loss, test, appliances):
"""
:param prediction_with_min_loss:
:param test:
:param appliances:
:return:
"""
mse_losses = {}
mae_losses = {}
for appliance in appliances:
mean_test_value = np.mean(test[appliance].values)
mse_losses[appliance] = mse_loss(prediction_with_min_loss[appliance], test[appliance].values)
mae_losses[appliance] = mae_loss(prediction_with_min_loss[appliance], test[appliance].values)
print('Appliance: {0}::::::: mean = {1:.2f} , mse = {2:.2f} - mae = {3:.2f}'.format(appliance, mean_test_value,
mse_losses[appliance],
mae_losses[appliance]))
return mse_losses, mae_losses
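
error_in_the_regression relies on mse_loss and mae_loss, which are defined elsewhere in the package (in the decision-tree module they come in through the star import from .data_eng) and are not shown in this commit. A sketch of what those helpers presumably look like, assuming plain NumPy implementations; the actual definitions may differ:

import numpy as np

# Assumed NumPy versions of the loss helpers imported from .data_eng;
# their real definitions are not part of this diff.
def mse_loss(prediction, target):
    return np.mean((np.asarray(prediction) - np.asarray(target)) ** 2)

def mae_loss(prediction, target):
    return np.mean(np.abs(np.asarray(prediction) - np.asarray(target)))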