diff --git a/config-cnn.json b/config-cnn.json
index d08f937..38d8fe2 100644
--- a/config-cnn.json
+++ b/config-cnn.json
@@ -6,7 +6,8 @@
     "data": {
         "filename": "./dataset/phi025-025_eta025-025_filtered.csv",
         "train_split": 0.70,
-        "normalise": true
+        "normalise": true,
+        "cilyndrical": true
     },
     "training": {
         "epochs": 20,
diff --git a/config-lstm-paralel.json b/config-lstm-paralel.json
index e186770..9cd0ae8 100644
--- a/config-lstm-paralel.json
+++ b/config-lstm-paralel.json
@@ -6,7 +6,8 @@
     "data": {
         "filename": "./dataset/phi025-025_eta025-025_filtered.csv",
         "train_split": 0.70,
-        "normalise": true
+        "normalise": true,
+        "cilyndrical": false
     },
     "training": {
         "epochs": 20,
diff --git a/config-lstm.json b/config-lstm.json
index 1d46e17..7a57078 100644
--- a/config-lstm.json
+++ b/config-lstm.json
@@ -6,7 +6,8 @@
     "data": {
         "filename": "./dataset/phi025-025_eta025-025_filtered.csv",
         "train_split": 0.70,
-        "normalise": true
+        "normalise": true,
+        "cilyndrical": false
    },
     "training": {
         "epochs": 20,
diff --git a/config.json b/config.json
index 7594160..88b6c9c 100644
--- a/config.json
+++ b/config.json
@@ -6,19 +6,20 @@
     "data": {
         "filename": "./dataset/phi025-025_eta025-025_filtered.csv",
         "train_split": 0.70,
-        "normalise": true
+        "normalise": true,
+        "cilyndrical": true
     },
     "training": {
         "epochs": 20,
         "batch_size": 32,
         "save_model": true,
-        "load_model": true,
+        "load_model": false,
         "use_gpu": true
     },
     "model": {
         "name": "lstm",
         "loss": "mse",
-        "optimizer": "RMSprop",
+        "optimizer": "rmsprop",
         "layers": [
             {
                 "type": "lstm",
diff --git a/main-paralel.py b/main-paralel.py
index f843dff..ef246ef 100644
--- a/main-paralel.py
+++ b/main-paralel.py
@@ -90,7 +90,8 @@ def main():
     num_features = configs['model']['layers'][0]['input_features'] # the number of features of each hits
     split = configs['data']['train_split'] # the number of features of each hits
-
+    cilyndrical = configs['data']['cilyndrical'] # set to polar or cartesian coordenates
+    normalise = configs['data']['normalise']
 
     # config gpu
     #gpu()
@@ -98,8 +99,8 @@ def main():
 
     # prepare data set
     data = Dataset(data_dir, KindNormalization.Zscore)
-    X, X_, y = data.prepare_training_data(FeatureType.Divided, normalise=True,
-                                     cilyndrical=True)
+    X, X_, y = data.prepare_training_data(FeatureType.Divided, normalise=normalise,
+                                     cilyndrical=cilyndrical)
 
     # reshape data
     X = data.reshape3d(X, time_steps, num_features)
diff --git a/main.py b/main.py
index 9e26fb4..7c0aa28 100644
--- a/main.py
+++ b/main.py
@@ -95,7 +95,8 @@ def main():
     num_features = configs['model']['layers'][0]['input_features'] # the number of features of each hits
     split = configs['data']['train_split'] # the number of features of each hits
-
+    cilyndrical = configs['data']['cilyndrical'] # set to polar or cartesian coordenates
+    normalise = configs['data']['normalise']
 
     # config gpu
     #gpu()
@@ -103,11 +104,11 @@ def main():
 
     # prepare data set
     data = Dataset(data_dir, KindNormalization.Zscore)
-    X, y = data.prepare_training_data(FeatureType.Positions, normalise=True,
-                                     cilyndrical=True)
+    X, y = data.prepare_training_data(FeatureType.Positions, normalise=normalise,
+                                     cilyndrical=cilyndrical)
 
     # reshape data
-    #X = data.reshape3d(X, time_steps, num_features)
+    X = data.reshape3d(X, time_steps, num_features)
 
     X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=1-split, random_state=42)
@@ -129,8 +130,6 @@ def main():
 
     model.build_model()
 
-    #x_train = [X_train, X_train_]
-
     # in-memory training
     history = model.train(
         x=X_train,
@@ -145,9 +144,7 @@
     if not model.load_model():
         print ('[Error] please change the config file : load_model')
         return
-
-    #x_test = [X_test, X_test_]
-
+
     predicted = model.predict_one_hit(X_test)
     y_predicted = np.reshape(predicted, (predicted.shape[0]*predicted.shape[1], 1))
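
The common thread in this patch: the normalisation and coordinate-system switches are no
longer hard-coded in the training scripts but read from the "data" block of the config
files. A minimal sketch of that lookup is below, using only the standard json module; the
Dataset / prepare_training_data call is kept as a comment because those classes live in
the repository's own modules, and only the arguments visible in the diff are assumed.

    import json

    # Load the experiment configuration (same file patched above).
    with open('config.json') as f:
        configs = json.load(f)

    # The two flags this diff propagates from the config into the scripts.
    normalise = configs['data']['normalise']      # apply z-score normalisation or not
    cilyndrical = configs['data']['cilyndrical']  # cylindrical/polar vs. cartesian coordinates

    print('normalise =', normalise, '| cilyndrical =', cilyndrical)

    # In main.py the flags are then forwarded to the data pipeline, roughly:
    # data = Dataset(data_dir, KindNormalization.Zscore)
    # X, y = data.prepare_training_data(FeatureType.Positions,
    #                                   normalise=normalise,
    #                                   cilyndrical=cilyndrical)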