Add params handling for training script
All checks were successful
s434704-training/pipeline/head This commit looks good
This commit is contained in:
parent 87d7867340
commit 1bbae86054
training.py  12 changed lines (9 additions, 3 deletions)
@@ -1,3 +1,5 @@
+import sys
+
 import pandas as pd
 import numpy as np
 import tensorflow as tf
@@ -7,6 +9,10 @@ from tensorflow import keras
 from tensorflow.keras import layers
 from tensorflow.keras.layers.experimental import preprocessing
 
+arguments = sys.argv[1:]
+
+activation_func = [command.split('=')[1] for command in arguments if command.split('=')[0] == '--activation_func']
+
 pd.set_option("display.max_columns", None)
 
 # Wczytanie danych
@@ -26,9 +32,9 @@ else:
 model = keras.Sequential([
     keras.Input(shape=(len(columns_to_use),)),
     normalizer,
-    layers.Dense(30, activation='relu'),
-    layers.Dense(10, activation='relu'),
-    layers.Dense(25, activation='relu'),
+    layers.Dense(30, activation='relu' if len(activation_func) == 0 else activation_func[0]),
+    layers.Dense(10, activation='relu' if len(activation_func) == 0 else activation_func[0]),
+    layers.Dense(25, activation='relu' if len(activation_func) == 0 else activation_func[0]),
     layers.Dense(1)
 ])
 
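Usage note (not part of the commit): the new parameter is read directly from sys.argv as a "--key=value" pair, so the pipeline can pass, for example, --activation_func=tanh. Below is a minimal sketch of the same parsing logic, assuming an invocation like "python training.py --activation_func=tanh"; the actual pipeline command line is not shown in this commit.

import sys

# Sketch of the "--key=value" parsing added to training.py in this commit.
# Assumed example invocation: python training.py --activation_func=tanh
arguments = sys.argv[1:]
activation_func = [arg.split('=')[1] for arg in arguments
                   if arg.split('=')[0] == '--activation_func']

# Mirrors the diff: fall back to 'relu' when the flag is absent.
activation = 'relu' if len(activation_func) == 0 else activation_func[0]
print(activation)  # 'tanh' when the flag is passed, otherwise 'relu'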