add nn script

Yevhenii Poliakov 2023-05-14 17:47:03 +02:00
parent 4b9410f07b
commit c3282144cd
3 changed files with 52 additions and 3 deletions


@@ -39,9 +39,9 @@ pipeline {
                 echo("run data script")
                 //sh "source docker_ium/bin/activate"
                 sh "ls -a"
-                sh "chmod u+x script2.py"
+                sh "chmod u+x script4.py"
                 //sh "pip3 show pandas"
-                sh "python3 script3.py"
+                sh "python3 script4.py"
             }
         }


@@ -9,7 +9,7 @@ from tensorflow.keras.preprocessing.sequence import pad_sequences
 from tensorflow.keras.utils import to_categorical
 # Step 1: Data Preprocessing
-df = pd.read_csv('25k_movies.csv.shuf')  # Replace with the actual file name or path
+df = pd.read_csv('25k_movies.csv.shuf')
 text_data = df['review']
 labels = df['sentiment']
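
The hunk above only shows the data-loading lines of this script. Given the pad_sequences and to_categorical imports in its header, a minimal sketch of how the review text might be tokenized and padded downstream is shown here; the Tokenizer settings (num_words, maxlen) and the label encoding are illustrative assumptions, not taken from the commit.

# Sketch only: assumed continuation of the preprocessing, not part of this commit
import pandas as pd
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.utils import to_categorical

df = pd.read_csv('25k_movies.csv.shuf')
text_data = df['review'].astype(str)
labels = df['sentiment']

tokenizer = Tokenizer(num_words=10000)      # assumed vocabulary size
tokenizer.fit_on_texts(text_data)
sequences = tokenizer.texts_to_sequences(text_data)
X = pad_sequences(sequences, maxlen=200)    # assumed maximum review length

# to_categorical expects integer class ids, so map the string labels first
y = to_categorical(labels.astype('category').cat.codes)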

script4.py (new file)

@@ -0,0 +1,49 @@
+import pandas as pd
+import numpy as np
+import tensorflow as tf
+from sklearn.model_selection import train_test_split
+from sklearn.preprocessing import StandardScaler
+
+# Step 1: Load the dataset
+data = pd.read_csv('25k_movies.csv.shuf')
+# Replace 'path_to_dataset.csv' with the actual path to your dataset file
+
+# Step 2: Preprocess the data
+features = ['Total Run Time', 'User Rating', 'Genres', 'Director Name', 'Writer Name']
+target = 'Movie Rating'
+data = data[features + [target]]
+
+# Handle missing values if any
+data = data.dropna()
+
+# Convert categorical variables to numerical representations
+data = pd.get_dummies(data, columns=['Genres', 'Director Name', 'Writer Name'])
+
+# Split the data into features and target variables
+X = data.drop(target, axis=1)
+y = data[target]
+
+# Split the data into training and testing sets
+X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
+
+# Standardize the feature data
+scaler = StandardScaler()
+X_train = scaler.fit_transform(X_train)
+X_test = scaler.transform(X_test)
+
+# Step 3: Build and train the neural network model
+model = tf.keras.Sequential([
+    tf.keras.layers.Dense(64, activation='relu', input_shape=(X_train.shape[1],)),
+    tf.keras.layers.Dense(32, activation='relu'),
+    tf.keras.layers.Dense(1)
+])
+model.compile(optimizer='adam', loss='mean_squared_error')
+model.fit(X_train, y_train, epochs=10, batch_size=32)
+
+# Step 4: Evaluate the model
+y_pred = model.predict(X_test)
+# Flatten predictions so the difference is computed element-wise against y_test
+mse = np.mean((y_pred.flatten() - y_test.to_numpy())**2)
+print(f"Mean Squared Error (MSE): {mse}")