73 KiB
73 KiB
Imports
import numpy as np
import keras
import os
import tensorflow as tf
from keras.layers import LSTM, Dense, Input
from nltk.translate.bleu_score import sentence_bleu, SmoothingFunction
from tqdm import tqdm
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
Check GPU
# List GPUs visible to TensorFlow. Uses tf.config.list_physical_devices,
# which replaces the deprecated tf.config.experimental variant and matches
# the API already used in the print below.
physical_devices = tf.config.list_physical_devices('GPU')
print("Num GPUs Available: ", len(physical_devices))
Num GPUs Available: 1
# Show the detected GPU device objects themselves.
gpu_devices = tf.config.list_physical_devices('GPU')
print(gpu_devices)
[PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]
Define the model parameters
# Training hyper-parameters for the character-level seq2seq model.
batch_size = 16  # samples per gradient update
epochs = 50  # full passes over the training set
latent_dim = 256  # dimensionality of the LSTM hidden/cell state
num_samples = 40000  # number of sentence pairs to load from the corpus
data_path = "pol-eng/pol.txt"  # tab-separated English/Polish pairs (Tatoeba export)
Read the data
# Read the parallel corpus: each line is "english<TAB>polish<TAB>attribution".
# Collect the sentence pairs and the character vocabulary of each side.
input_texts = []
target_texts = []
input_characters = set()
target_characters = set()
with open(data_path, "r", encoding="utf-8") as f:
    lines = f.read().split("\n")
for line in lines[: min(num_samples, len(lines) - 1)]:
    input_text, target_text, _ = line.split("\t")
    # "\t" marks start-of-sequence and "\n" end-of-sequence for the decoder.
    target_text = "\t" + target_text + "\n"
    input_texts.append(input_text)
    target_texts.append(target_text)
    # set.update replaces the redundant membership-test-then-add loops.
    input_characters.update(input_text)
    target_characters.update(target_text)
input_characters = sorted(input_characters)
target_characters = sorted(target_characters)
num_encoder_tokens = len(input_characters)
num_decoder_tokens = len(target_characters)
max_encoder_seq_length = max(len(txt) for txt in input_texts)
max_decoder_seq_length = max(len(txt) for txt in target_texts)
print("Number of samples:", len(input_texts))
print("Number of unique input tokens:", num_encoder_tokens)
print("Number of unique output tokens:", num_decoder_tokens)
print("Max sequence length for inputs:", max_encoder_seq_length)
print("Max sequence length for outputs:", max_decoder_seq_length)
Number of samples: 40000 Number of unique input tokens: 76 Number of unique output tokens: 99 Max sequence length for inputs: 39 Max sequence length for outputs: 68
Define the input and target data
# Character -> index lookup tables for both vocabularies.
input_token_index = {char: i for i, char in enumerate(input_characters)}
target_token_index = {char: i for i, char in enumerate(target_characters)}

# One-hot tensors of shape (num_pairs, max_seq_len, vocab_size).
n_pairs = len(input_texts)
encoder_input_data = np.zeros(
    (n_pairs, max_encoder_seq_length, num_encoder_tokens), dtype="float32"
)
decoder_input_data = np.zeros(
    (n_pairs, max_decoder_seq_length, num_decoder_tokens), dtype="float32"
)
decoder_target_data = np.zeros(
    (n_pairs, max_decoder_seq_length, num_decoder_tokens), dtype="float32"
)

for i, (input_text, target_text) in enumerate(zip(input_texts, target_texts)):
    for t, char in enumerate(input_text):
        encoder_input_data[i, t, input_token_index[char]] = 1.0
    # Pad the remainder of the encoder sequence with the space character.
    encoder_input_data[i, t + 1 :, input_token_index[" "]] = 1.0
    for t, char in enumerate(target_text):
        decoder_input_data[i, t, target_token_index[char]] = 1.0
        if t > 0:
            # Targets are the decoder inputs shifted one step ahead, so the
            # start character "\t" never appears in decoder_target_data.
            decoder_target_data[i, t - 1, target_token_index[char]] = 1.0
    # Pad the rest of both decoder tensors with spaces.
    decoder_input_data[i, t + 1 :, target_token_index[" "]] = 1.0
    decoder_target_data[i, t:, target_token_index[" "]] = 1.0
Define the model
# Training-time graph: the encoder consumes the source one-hot sequence and
# its final LSTM states seed the decoder, which is trained with teacher
# forcing to predict the target sequence one step ahead.
encoder_inputs = Input(shape=(None, num_encoder_tokens))
encoder_outputs, state_h, state_c = LSTM(latent_dim, return_state=True)(encoder_inputs)
encoder_states = [state_h, state_c]

decoder_inputs = Input(shape=(None, num_decoder_tokens))
decoder_lstm = LSTM(latent_dim, return_sequences=True, return_state=True)
decoder_outputs, _, _ = decoder_lstm(decoder_inputs, initial_state=encoder_states)
decoder_dense = Dense(num_decoder_tokens, activation="softmax")
decoder_outputs = decoder_dense(decoder_outputs)
Train the model
# Compile and fit the teacher-forced seq2seq model, holding out 20% of the
# pairs for validation, then persist it for the inference stage below.
model = keras.Model([encoder_inputs, decoder_inputs], decoder_outputs)
model.compile(
    optimizer="rmsprop",
    loss="categorical_crossentropy",
    metrics=["accuracy"],
)
model.fit(
    x=[encoder_input_data, decoder_input_data],
    y=decoder_target_data,
    batch_size=batch_size,
    epochs=epochs,
    validation_split=0.2,
)
model.save("s2s_model.keras")
Epoch 1/50 2000/2000 [==============================] - 46s 19ms/step - loss: 0.7914 - accuracy: 0.7804 - val_loss: 0.9906 - val_accuracy: 0.7111 Epoch 2/50 2000/2000 [==============================] - 34s 17ms/step - loss: 0.5680 - accuracy: 0.8332 - val_loss: 0.8399 - val_accuracy: 0.7529 Epoch 3/50 2000/2000 [==============================] - 35s 17ms/step - loss: 0.4870 - accuracy: 0.8568 - val_loss: 0.7561 - val_accuracy: 0.7769 Epoch 4/50 2000/2000 [==============================] - 35s 17ms/step - loss: 0.4405 - accuracy: 0.8702 - val_loss: 0.7148 - val_accuracy: 0.7894 Epoch 5/50 2000/2000 [==============================] - 37s 18ms/step - loss: 0.4091 - accuracy: 0.8794 - val_loss: 0.6904 - val_accuracy: 0.7963 Epoch 6/50 2000/2000 [==============================] - 35s 18ms/step - loss: 0.3858 - accuracy: 0.8860 - val_loss: 0.6719 - val_accuracy: 0.8025 Epoch 7/50 2000/2000 [==============================] - 35s 18ms/step - loss: 0.3671 - accuracy: 0.8912 - val_loss: 0.6604 - val_accuracy: 0.8064 Epoch 8/50 2000/2000 [==============================] - 37s 18ms/step - loss: 0.3519 - accuracy: 0.8957 - val_loss: 0.6514 - val_accuracy: 0.8100 Epoch 9/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.3390 - accuracy: 0.8994 - val_loss: 0.6477 - val_accuracy: 0.8112 Epoch 10/50 2000/2000 [==============================] - 35s 17ms/step - loss: 0.3282 - accuracy: 0.9025 - val_loss: 0.6460 - val_accuracy: 0.8125 Epoch 11/50 2000/2000 [==============================] - 35s 17ms/step - loss: 0.3180 - accuracy: 0.9052 - val_loss: 0.6436 - val_accuracy: 0.8137 Epoch 12/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.3092 - accuracy: 0.9079 - val_loss: 0.6467 - val_accuracy: 0.8140 Epoch 13/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.3013 - accuracy: 0.9100 - val_loss: 0.6468 - val_accuracy: 0.8143 Epoch 14/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2941 - accuracy: 
0.9124 - val_loss: 0.6478 - val_accuracy: 0.8149 Epoch 15/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2876 - accuracy: 0.9140 - val_loss: 0.6510 - val_accuracy: 0.8154 Epoch 16/50 2000/2000 [==============================] - 38s 19ms/step - loss: 0.2814 - accuracy: 0.9159 - val_loss: 0.6575 - val_accuracy: 0.8156 Epoch 17/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2759 - accuracy: 0.9175 - val_loss: 0.6627 - val_accuracy: 0.8146 Epoch 18/50 2000/2000 [==============================] - 37s 18ms/step - loss: 0.2702 - accuracy: 0.9190 - val_loss: 0.6649 - val_accuracy: 0.8149 Epoch 19/50 2000/2000 [==============================] - 39s 19ms/step - loss: 0.2653 - accuracy: 0.9204 - val_loss: 0.6731 - val_accuracy: 0.8143 Epoch 20/50 2000/2000 [==============================] - 37s 19ms/step - loss: 0.2608 - accuracy: 0.9217 - val_loss: 0.6772 - val_accuracy: 0.8135 Epoch 21/50 2000/2000 [==============================] - 38s 19ms/step - loss: 0.2562 - accuracy: 0.9230 - val_loss: 0.6812 - val_accuracy: 0.8139 Epoch 22/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2524 - accuracy: 0.9242 - val_loss: 0.6815 - val_accuracy: 0.8143 Epoch 23/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2481 - accuracy: 0.9253 - val_loss: 0.6875 - val_accuracy: 0.8135 Epoch 24/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2448 - accuracy: 0.9264 - val_loss: 0.6945 - val_accuracy: 0.8142 Epoch 25/50 2000/2000 [==============================] - 38s 19ms/step - loss: 0.2413 - accuracy: 0.9273 - val_loss: 0.6989 - val_accuracy: 0.8127 Epoch 26/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2379 - accuracy: 0.9282 - val_loss: 0.7044 - val_accuracy: 0.8129 Epoch 27/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2349 - accuracy: 0.9291 - val_loss: 0.7080 - val_accuracy: 0.8122 Epoch 28/50 2000/2000 
[==============================] - 36s 18ms/step - loss: 0.2319 - accuracy: 0.9298 - val_loss: 0.7118 - val_accuracy: 0.8127 Epoch 29/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2289 - accuracy: 0.9309 - val_loss: 0.7144 - val_accuracy: 0.8124 Epoch 30/50 2000/2000 [==============================] - 37s 18ms/step - loss: 0.2263 - accuracy: 0.9315 - val_loss: 0.7215 - val_accuracy: 0.8119 Epoch 31/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2236 - accuracy: 0.9322 - val_loss: 0.7254 - val_accuracy: 0.8123 Epoch 32/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2208 - accuracy: 0.9331 - val_loss: 0.7319 - val_accuracy: 0.8111 Epoch 33/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2188 - accuracy: 0.9334 - val_loss: 0.7390 - val_accuracy: 0.8099 Epoch 34/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2162 - accuracy: 0.9341 - val_loss: 0.7406 - val_accuracy: 0.8107 Epoch 35/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2142 - accuracy: 0.9348 - val_loss: 0.7477 - val_accuracy: 0.8101 Epoch 36/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2122 - accuracy: 0.9356 - val_loss: 0.7504 - val_accuracy: 0.8099 Epoch 37/50 2000/2000 [==============================] - 37s 18ms/step - loss: 0.2101 - accuracy: 0.9360 - val_loss: 0.7528 - val_accuracy: 0.8098 Epoch 38/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2079 - accuracy: 0.9366 - val_loss: 0.7568 - val_accuracy: 0.8103 Epoch 39/50 2000/2000 [==============================] - 37s 19ms/step - loss: 0.2063 - accuracy: 0.9369 - val_loss: 0.7615 - val_accuracy: 0.8090 Epoch 40/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2045 - accuracy: 0.9376 - val_loss: 0.7615 - val_accuracy: 0.8101 Epoch 41/50 2000/2000 [==============================] - 37s 19ms/step - loss: 0.2027 - accuracy: 0.9379 - 
val_loss: 0.7684 - val_accuracy: 0.8089 Epoch 42/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.2010 - accuracy: 0.9385 - val_loss: 0.7784 - val_accuracy: 0.8080 Epoch 43/50 2000/2000 [==============================] - 39s 20ms/step - loss: 0.1993 - accuracy: 0.9389 - val_loss: 0.7801 - val_accuracy: 0.8083 Epoch 44/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.1978 - accuracy: 0.9393 - val_loss: 0.7780 - val_accuracy: 0.8084 Epoch 45/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.1961 - accuracy: 0.9397 - val_loss: 0.7928 - val_accuracy: 0.8078 Epoch 46/50 2000/2000 [==============================] - 42s 21ms/step - loss: 0.1944 - accuracy: 0.9404 - val_loss: 0.7901 - val_accuracy: 0.8083 Epoch 47/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.1930 - accuracy: 0.9408 - val_loss: 0.7916 - val_accuracy: 0.8080 Epoch 48/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.1918 - accuracy: 0.9410 - val_loss: 0.7999 - val_accuracy: 0.8072 Epoch 49/50 2000/2000 [==============================] - 37s 18ms/step - loss: 0.1902 - accuracy: 0.9413 - val_loss: 0.8061 - val_accuracy: 0.8070 Epoch 50/50 2000/2000 [==============================] - 36s 18ms/step - loss: 0.1894 - accuracy: 0.9416 - val_loss: 0.8051 - val_accuracy: 0.8069
Load model
# Reload the trained model from disk so inference can run independently of
# the training session above.
model = keras.models.load_model("s2s_model.keras")
Encoder model
# Build the inference-time encoder from the loaded model's layers.
# NOTE(review): relies on the exact layer order of the training model —
# model.layers[2] is assumed to be the encoder LSTM; verify if the
# architecture ever changes.
encoder_inputs = model.input[0]  # first Input: source one-hot sequence
encoder_outputs, state_h_enc, state_c_enc = model.layers[2].output
encoder_states = [state_h_enc, state_c_enc]
encoder_model = keras.Model(encoder_inputs, encoder_states)
Decoder model
# Build the inference-time decoder: it runs one character step at a time,
# fed the previously sampled character plus the LSTM states carried over
# from the prior step.
# NOTE(review): layer indices 3 (decoder LSTM) and 4 (softmax Dense) mirror
# the training model's construction order — confirm if layers are added.
decoder_inputs = model.input[1]  # second Input: target one-hot sequence
decoder_state_input_h = keras.Input(shape=(latent_dim,))
decoder_state_input_c = keras.Input(shape=(latent_dim,))
decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]
decoder_lstm = model.layers[3]
decoder_outputs, state_h_dec, state_c_dec = decoder_lstm(
decoder_inputs, initial_state=decoder_states_inputs
)
decoder_states = [state_h_dec, state_c_dec]
decoder_dense = model.layers[4]
decoder_outputs = decoder_dense(decoder_outputs)
decoder_model = keras.Model(
[decoder_inputs] + decoder_states_inputs, [decoder_outputs] + decoder_states
)
Define the decoding sequence
# Index -> character lookups to turn model output back into text.
reverse_input_char_index = {i: char for char, i in input_token_index.items()}
reverse_target_char_index = {i: char for char, i in target_token_index.items()}


def decode_sequence(input_seq):
    """Greedily decode one encoded input sequence into a target string.

    Args:
        input_seq: one-hot encoder input, shape (1, timesteps, num_encoder_tokens).

    Returns:
        The decoded sentence, normally terminated by the end-of-sequence
        newline character.
    """
    # Encode the source sentence into the initial decoder states.
    states_value = encoder_model.predict(input_seq, verbose=0)

    # Seed the decoder with the start-of-sequence character "\t".
    target_seq = np.zeros((1, 1, num_decoder_tokens))
    target_seq[0, 0, target_token_index["\t"]] = 1.0

    decoded_sentence = ""
    while True:
        output_tokens, h, c = decoder_model.predict(
            [target_seq] + states_value, verbose=0
        )

        # Greedily pick the most probable next character.
        sampled_token_index = np.argmax(output_tokens[0, -1, :])
        sampled_char = reverse_target_char_index[sampled_token_index]
        decoded_sentence += sampled_char

        # Stop on the end-of-sequence marker, or bail out if the decoder
        # runs past the longest target length seen in training.
        if sampled_char == "\n" or len(decoded_sentence) > max_decoder_seq_length:
            break

        # Feed the sampled character and updated states into the next step.
        target_seq = np.zeros((1, 1, num_decoder_tokens))
        target_seq[0, 0, sampled_token_index] = 1.0
        states_value = [h, c]

    return decoded_sentence
Sample predictions
# Qualitative check: decode the first 20 training sentences and show them
# next to their sources.
for seq_index in range(20):
    input_seq = encoder_input_data[seq_index : seq_index + 1]
    decoded = decode_sequence(input_seq)
    print("-")
    print("Input sentence:", input_texts[seq_index])
    print("Decoded sentence:", decoded)
- Input sentence: Go. Decoded sentence: Idź się. - Input sentence: Hi. Decoded sentence: Cześć. - Input sentence: Run! Decoded sentence: Uciekaj! - Input sentence: Run. Decoded sentence: Biegnij. - Input sentence: Run. Decoded sentence: Biegnij. - Input sentence: Who? Decoded sentence: Kto? - Input sentence: Wow! Decoded sentence: Jak sprawdzi! - Input sentence: Wow! Decoded sentence: Jak sprawdzi! - Input sentence: Duck! Decoded sentence: Unik! - Input sentence: Fire! Decoded sentence: Staktowa! - Input sentence: Fire! Decoded sentence: Staktowa! - Input sentence: Fire! Decoded sentence: Staktowa! - Input sentence: Help! Decoded sentence: Pomocy! - Input sentence: Hide. Decoded sentence: Utekty to. - Input sentence: Jump! Decoded sentence: Samujesz! - Input sentence: Jump. Decoded sentence: Skok. - Input sentence: Stay. Decoded sentence: Zostań. - Input sentence: Stop! Decoded sentence: Zaczej się! - Input sentence: Stop! Decoded sentence: Zaczej się! - Input sentence: Wait! Decoded sentence: Czekajcie!
Compute BLEU score
def preprocess_text(text):
    """Tokenize *text* into a list of whitespace-separated tokens.

    str.split() with no argument already discards leading and trailing
    whitespace and collapses runs of whitespace, so the previous
    .strip() call was redundant.
    """
    return text.split()
def compute_bleu_score(target_texts, range_limit=1000):
    """Print the average sentence-level BLEU over the first samples.

    Decodes each of the first *range_limit* encoder inputs and compares the
    result against the corresponding reference translation using smoothed
    sentence BLEU (SmoothingFunction method4).

    Args:
        target_texts: reference target strings, aligned with encoder_input_data.
        range_limit: number of leading samples to evaluate; clamped to the
            number of available references so oversized requests no longer
            index past the end of the dataset.
    """
    # Robustness: never iterate past the available references.
    limit = min(range_limit, len(target_texts))
    candidate_corpus = []
    references_corpus = []
    for seq_index in tqdm(range(limit), desc="Calculating BLEU scores"):
        input_seq = encoder_input_data[seq_index : seq_index + 1]
        decoded_sentence = decode_sequence(input_seq)
        candidate_corpus.append(preprocess_text(decoded_sentence))
        # sentence_bleu expects a list of reference token lists per candidate.
        references_corpus.append([preprocess_text(target_texts[seq_index])])
    if not candidate_corpus:
        # Avoid ZeroDivisionError when nothing was evaluated.
        print("BLEU score: n/a (no samples evaluated)")
        return
    smoothie = SmoothingFunction().method4
    bleu_scores = [
        sentence_bleu(ref, cand, smoothing_function=smoothie)
        for ref, cand in zip(references_corpus, candidate_corpus)
    ]
    print("BLEU score:", sum(bleu_scores) / len(bleu_scores))
# Quick evaluation on the first 100 pairs.
compute_bleu_score(target_texts, range_limit=100)
Calculating BLEU scores: 100%|██████████| 100/100 [01:12<00:00, 1.39it/s]
BLEU score: 0.2236331802292942
# Larger evaluation on the first 1000 pairs.
compute_bleu_score(target_texts, range_limit=1000)
Calculating BLEU scores: 100%|██████████| 1000/1000 [14:39<00:00, 1.14it/s]
BLEU score: 0.152040789734918
# Full-scale evaluation on the first 10000 pairs (slow; the recorded run
# was interrupted by hand).
compute_bleu_score(target_texts, range_limit=10000)
Calculating BLEU scores: 8%|▊ | 761/10000 [13:31<2:44:18, 1.07s/it]
[1;31m---------------------------------------------------------------------------[0m [1;31mKeyboardInterrupt[0m Traceback (most recent call last) Cell [1;32mIn[19], line 1[0m [1;32m----> 1[0m [43mcompute_bleu_score[49m[43m([49m[43mtarget_texts[49m[43m,[49m[43m [49m[38;5;241;43m10000[39;49m[43m)[49m Cell [1;32mIn[15], line 13[0m, in [0;36mcompute_bleu_score[1;34m(target_texts, range_limit)[0m [0;32m 11[0m [38;5;28;01mfor[39;00m seq_index [38;5;129;01min[39;00m tqdm([38;5;28mrange[39m(range_limit), desc[38;5;241m=[39m[38;5;124m"[39m[38;5;124mCalculating BLEU scores[39m[38;5;124m"[39m): [0;32m 12[0m input_seq [38;5;241m=[39m encoder_input_data[seq_index : seq_index [38;5;241m+[39m [38;5;241m1[39m] [1;32m---> 13[0m decoded_sentence [38;5;241m=[39m [43mdecode_sequence[49m[43m([49m[43minput_seq[49m[43m)[49m [0;32m 14[0m candidate_corpus[38;5;241m.[39mappend(preprocess_text(decoded_sentence)) [0;32m 15[0m references_corpus[38;5;241m.[39mappend([preprocess_text(target_texts[seq_index])]) Cell [1;32mIn[14], line 6[0m, in [0;36mdecode_sequence[1;34m(input_seq)[0m [0;32m 5[0m [38;5;28;01mdef[39;00m [38;5;21mdecode_sequence[39m(input_seq): [1;32m----> 6[0m states_value [38;5;241m=[39m [43mencoder_model[49m[38;5;241;43m.[39;49m[43mpredict[49m[43m([49m[43minput_seq[49m[43m,[49m[43m [49m[43mverbose[49m[38;5;241;43m=[39;49m[38;5;241;43m0[39;49m[43m)[49m [0;32m 8[0m target_seq [38;5;241m=[39m np[38;5;241m.[39mzeros(([38;5;241m1[39m, [38;5;241m1[39m, num_decoder_tokens)) [0;32m 9[0m target_seq[[38;5;241m0[39m, [38;5;241m0[39m, target_token_index[[38;5;124m"[39m[38;5;130;01m\t[39;00m[38;5;124m"[39m]] [38;5;241m=[39m [38;5;241m1.0[39m File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\keras\utils\traceback_utils.py:65[0m, in [0;36mfilter_traceback.<locals>.error_handler[1;34m(*args, **kwargs)[0m [0;32m 63[0m filtered_tb [38;5;241m=[39m [38;5;28;01mNone[39;00m [0;32m 64[0m [38;5;28;01mtry[39;00m: [1;32m---> 65[0m [38;5;28;01mreturn[39;00m fn([38;5;241m*[39margs, 
[38;5;241m*[39m[38;5;241m*[39mkwargs) [0;32m 66[0m [38;5;28;01mexcept[39;00m [38;5;167;01mException[39;00m [38;5;28;01mas[39;00m e: [0;32m 67[0m filtered_tb [38;5;241m=[39m _process_traceback_frames(e[38;5;241m.[39m__traceback__) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\keras\engine\training.py:2220[0m, in [0;36mModel.predict[1;34m(self, x, batch_size, verbose, steps, callbacks, max_queue_size, workers, use_multiprocessing)[0m [0;32m 2211[0m [38;5;28;01mexcept[39;00m [38;5;167;01mValueError[39;00m: [0;32m 2212[0m warnings[38;5;241m.[39mwarn( [0;32m 2213[0m [38;5;124m"[39m[38;5;124mUsing Model.predict with MultiWorkerMirroredStrategy [39m[38;5;124m"[39m [0;32m 2214[0m [38;5;124m"[39m[38;5;124mor TPUStrategy and AutoShardPolicy.FILE might lead to [39m[38;5;124m"[39m [1;32m (...)[0m [0;32m 2217[0m stacklevel[38;5;241m=[39m[38;5;241m2[39m, [0;32m 2218[0m ) [1;32m-> 2220[0m data_handler [38;5;241m=[39m [43mdata_adapter[49m[38;5;241;43m.[39;49m[43mget_data_handler[49m[43m([49m [0;32m 2221[0m [43m [49m[43mx[49m[38;5;241;43m=[39;49m[43mx[49m[43m,[49m [0;32m 2222[0m [43m [49m[43mbatch_size[49m[38;5;241;43m=[39;49m[43mbatch_size[49m[43m,[49m [0;32m 2223[0m [43m [49m[43msteps_per_epoch[49m[38;5;241;43m=[39;49m[43msteps[49m[43m,[49m [0;32m 2224[0m [43m [49m[43minitial_epoch[49m[38;5;241;43m=[39;49m[38;5;241;43m0[39;49m[43m,[49m [0;32m 2225[0m [43m [49m[43mepochs[49m[38;5;241;43m=[39;49m[38;5;241;43m1[39;49m[43m,[49m [0;32m 2226[0m [43m [49m[43mmax_queue_size[49m[38;5;241;43m=[39;49m[43mmax_queue_size[49m[43m,[49m [0;32m 2227[0m [43m [49m[43mworkers[49m[38;5;241;43m=[39;49m[43mworkers[49m[43m,[49m [0;32m 2228[0m [43m [49m[43muse_multiprocessing[49m[38;5;241;43m=[39;49m[43muse_multiprocessing[49m[43m,[49m [0;32m 2229[0m [43m [49m[43mmodel[49m[38;5;241;43m=[39;49m[38;5;28;43mself[39;49m[43m,[49m [0;32m 2230[0m [43m [49m[43msteps_per_execution[49m[38;5;241;43m=[39;49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_steps_per_execution[49m[43m,[49m [0;32m 
2231[0m [43m[49m[43m)[49m [0;32m 2233[0m [38;5;66;03m# Container that configures and calls `tf.keras.Callback`s.[39;00m [0;32m 2234[0m [38;5;28;01mif[39;00m [38;5;129;01mnot[39;00m [38;5;28misinstance[39m(callbacks, callbacks_module[38;5;241m.[39mCallbackList): File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\keras\engine\data_adapter.py:1582[0m, in [0;36mget_data_handler[1;34m(*args, **kwargs)[0m [0;32m 1580[0m [38;5;28;01mif[39;00m [38;5;28mgetattr[39m(kwargs[[38;5;124m"[39m[38;5;124mmodel[39m[38;5;124m"[39m], [38;5;124m"[39m[38;5;124m_cluster_coordinator[39m[38;5;124m"[39m, [38;5;28;01mNone[39;00m): [0;32m 1581[0m [38;5;28;01mreturn[39;00m _ClusterCoordinatorDataHandler([38;5;241m*[39margs, [38;5;241m*[39m[38;5;241m*[39mkwargs) [1;32m-> 1582[0m [38;5;28;01mreturn[39;00m DataHandler([38;5;241m*[39margs, [38;5;241m*[39m[38;5;241m*[39mkwargs) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\keras\engine\data_adapter.py:1262[0m, in [0;36mDataHandler.__init__[1;34m(self, x, y, sample_weight, batch_size, steps_per_epoch, initial_epoch, epochs, shuffle, class_weight, max_queue_size, workers, use_multiprocessing, model, steps_per_execution, distribute)[0m [0;32m 1259[0m [38;5;28mself[39m[38;5;241m.[39m_steps_per_execution [38;5;241m=[39m steps_per_execution [0;32m 1261[0m adapter_cls [38;5;241m=[39m select_data_adapter(x, y) [1;32m-> 1262[0m [38;5;28mself[39m[38;5;241m.[39m_adapter [38;5;241m=[39m [43madapter_cls[49m[43m([49m [0;32m 1263[0m [43m [49m[43mx[49m[43m,[49m [0;32m 1264[0m [43m [49m[43my[49m[43m,[49m [0;32m 1265[0m [43m [49m[43mbatch_size[49m[38;5;241;43m=[39;49m[43mbatch_size[49m[43m,[49m [0;32m 1266[0m [43m [49m[43msteps[49m[38;5;241;43m=[39;49m[43msteps_per_epoch[49m[43m,[49m [0;32m 1267[0m [43m [49m[43mepochs[49m[38;5;241;43m=[39;49m[43mepochs[49m[43m [49m[38;5;241;43m-[39;49m[43m [49m[43minitial_epoch[49m[43m,[49m [0;32m 1268[0m [43m [49m[43msample_weights[49m[38;5;241;43m=[39;49m[43msample_weight[49m[43m,[49m [0;32m 
1269[0m [43m [49m[43mshuffle[49m[38;5;241;43m=[39;49m[43mshuffle[49m[43m,[49m [0;32m 1270[0m [43m [49m[43mmax_queue_size[49m[38;5;241;43m=[39;49m[43mmax_queue_size[49m[43m,[49m [0;32m 1271[0m [43m [49m[43mworkers[49m[38;5;241;43m=[39;49m[43mworkers[49m[43m,[49m [0;32m 1272[0m [43m [49m[43muse_multiprocessing[49m[38;5;241;43m=[39;49m[43muse_multiprocessing[49m[43m,[49m [0;32m 1273[0m [43m [49m[43mdistribution_strategy[49m[38;5;241;43m=[39;49m[43mtf[49m[38;5;241;43m.[39;49m[43mdistribute[49m[38;5;241;43m.[39;49m[43mget_strategy[49m[43m([49m[43m)[49m[43m,[49m [0;32m 1274[0m [43m [49m[43mmodel[49m[38;5;241;43m=[39;49m[43mmodel[49m[43m,[49m [0;32m 1275[0m [43m[49m[43m)[49m [0;32m 1277[0m strategy [38;5;241m=[39m tf[38;5;241m.[39mdistribute[38;5;241m.[39mget_strategy() [0;32m 1279[0m [38;5;28mself[39m[38;5;241m.[39m_current_step [38;5;241m=[39m [38;5;241m0[39m File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\keras\engine\data_adapter.py:347[0m, in [0;36mTensorLikeDataAdapter.__init__[1;34m(self, x, y, sample_weights, sample_weight_modes, batch_size, epochs, steps, shuffle, **kwargs)[0m [0;32m 344[0m flat_dataset [38;5;241m=[39m flat_dataset[38;5;241m.[39mshuffle([38;5;241m1024[39m)[38;5;241m.[39mrepeat(epochs) [0;32m 345[0m [38;5;28;01mreturn[39;00m flat_dataset [1;32m--> 347[0m indices_dataset [38;5;241m=[39m [43mindices_dataset[49m[38;5;241;43m.[39;49m[43mflat_map[49m[43m([49m[43mslice_batch_indices[49m[43m)[49m [0;32m 349[0m dataset [38;5;241m=[39m [38;5;28mself[39m[38;5;241m.[39mslice_inputs(indices_dataset, inputs) [0;32m 351[0m [38;5;28;01mif[39;00m shuffle [38;5;241m==[39m [38;5;124m"[39m[38;5;124mbatch[39m[38;5;124m"[39m: File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py:2245[0m, in [0;36mDatasetV2.flat_map[1;34m(self, map_func, name)[0m [0;32m 2212[0m [38;5;28;01mdef[39;00m [38;5;21mflat_map[39m([38;5;28mself[39m, map_func, name[38;5;241m=[39m[38;5;28;01mNone[39;00m): [0;32m 2213[0m 
[38;5;250m [39m[38;5;124;03m"""Maps `map_func` across this dataset and flattens the result.[39;00m [0;32m 2214[0m [0;32m 2215[0m [38;5;124;03m The type signature is:[39;00m [1;32m (...)[0m [0;32m 2243[0m [38;5;124;03m Dataset: A `Dataset`.[39;00m [0;32m 2244[0m [38;5;124;03m """[39;00m [1;32m-> 2245[0m [38;5;28;01mreturn[39;00m [43mFlatMapDataset[49m[43m([49m[38;5;28;43mself[39;49m[43m,[49m[43m [49m[43mmap_func[49m[43m,[49m[43m [49m[43mname[49m[38;5;241;43m=[39;49m[43mname[49m[43m)[49m File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py:5484[0m, in [0;36mFlatMapDataset.__init__[1;34m(self, input_dataset, map_func, name)[0m [0;32m 5482[0m [38;5;250m[39m[38;5;124;03m"""See `Dataset.flat_map()` for details."""[39;00m [0;32m 5483[0m [38;5;28mself[39m[38;5;241m.[39m_input_dataset [38;5;241m=[39m input_dataset [1;32m-> 5484[0m [38;5;28mself[39m[38;5;241m.[39m_map_func [38;5;241m=[39m [43mstructured_function[49m[38;5;241;43m.[39;49m[43mStructuredFunctionWrapper[49m[43m([49m [0;32m 5485[0m [43m [49m[43mmap_func[49m[43m,[49m[43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_transformation_name[49m[43m([49m[43m)[49m[43m,[49m[43m [49m[43mdataset[49m[38;5;241;43m=[39;49m[43minput_dataset[49m[43m)[49m [0;32m 5486[0m [38;5;28;01mif[39;00m [38;5;129;01mnot[39;00m [38;5;28misinstance[39m([38;5;28mself[39m[38;5;241m.[39m_map_func[38;5;241m.[39moutput_structure, DatasetSpec): [0;32m 5487[0m [38;5;28;01mraise[39;00m [38;5;167;01mTypeError[39;00m( [0;32m 5488[0m [38;5;124m"[39m[38;5;124mThe `map_func` argument must return a `Dataset` object. 
Got [39m[38;5;124m"[39m [0;32m 5489[0m [38;5;124mf[39m[38;5;124m"[39m[38;5;132;01m{[39;00m_get_type([38;5;28mself[39m[38;5;241m.[39m_map_func[38;5;241m.[39moutput_structure)[38;5;132;01m!r}[39;00m[38;5;124m.[39m[38;5;124m"[39m) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\data\ops\structured_function.py:271[0m, in [0;36mStructuredFunctionWrapper.__init__[1;34m(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)[0m [0;32m 264[0m warnings[38;5;241m.[39mwarn( [0;32m 265[0m [38;5;124m"[39m[38;5;124mEven though the `tf.config.experimental_run_functions_eagerly` [39m[38;5;124m"[39m [0;32m 266[0m [38;5;124m"[39m[38;5;124moption is set, this option does not apply to tf.data functions. [39m[38;5;124m"[39m [0;32m 267[0m [38;5;124m"[39m[38;5;124mTo force eager execution of tf.data functions, please use [39m[38;5;124m"[39m [0;32m 268[0m [38;5;124m"[39m[38;5;124m`tf.data.experimental.enable_debug_mode()`.[39m[38;5;124m"[39m) [0;32m 269[0m fn_factory [38;5;241m=[39m trace_tf_function(defun_kwargs) [1;32m--> 271[0m [38;5;28mself[39m[38;5;241m.[39m_function [38;5;241m=[39m [43mfn_factory[49m[43m([49m[43m)[49m [0;32m 272[0m [38;5;66;03m# There is no graph to add in eager mode.[39;00m [0;32m 273[0m add_to_graph [38;5;241m&[39m[38;5;241m=[39m [38;5;129;01mnot[39;00m context[38;5;241m.[39mexecuting_eagerly() File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\eager\function.py:2610[0m, in [0;36mFunction.get_concrete_function[1;34m(self, *args, **kwargs)[0m [0;32m 2601[0m [38;5;28;01mdef[39;00m [38;5;21mget_concrete_function[39m([38;5;28mself[39m, [38;5;241m*[39margs, [38;5;241m*[39m[38;5;241m*[39mkwargs): [0;32m 2602[0m [38;5;250m [39m[38;5;124;03m"""Returns a `ConcreteFunction` specialized to inputs and execution context.[39;00m [0;32m 2603[0m [0;32m 2604[0m [38;5;124;03m Args:[39;00m [1;32m (...)[0m [0;32m 2608[0m 
[38;5;124;03m or `tf.Tensor` or `tf.TensorSpec`.[39;00m [0;32m 2609[0m [38;5;124;03m """[39;00m [1;32m-> 2610[0m graph_function [38;5;241m=[39m [38;5;28mself[39m[38;5;241m.[39m_get_concrete_function_garbage_collected( [0;32m 2611[0m [38;5;241m*[39margs, [38;5;241m*[39m[38;5;241m*[39mkwargs) [0;32m 2612[0m graph_function[38;5;241m.[39m_garbage_collector[38;5;241m.[39mrelease() [38;5;66;03m# pylint: disable=protected-access[39;00m [0;32m 2613[0m [38;5;28;01mreturn[39;00m graph_function File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\eager\function.py:2576[0m, in [0;36mFunction._get_concrete_function_garbage_collected[1;34m(self, *args, **kwargs)[0m [0;32m 2574[0m args, kwargs [38;5;241m=[39m [38;5;28;01mNone[39;00m, [38;5;28;01mNone[39;00m [0;32m 2575[0m [38;5;28;01mwith[39;00m [38;5;28mself[39m[38;5;241m.[39m_lock: [1;32m-> 2576[0m graph_function, _ [38;5;241m=[39m [38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_maybe_define_function[49m[43m([49m[43margs[49m[43m,[49m[43m [49m[43mkwargs[49m[43m)[49m [0;32m 2577[0m seen_names [38;5;241m=[39m [38;5;28mset[39m() [0;32m 2578[0m captured [38;5;241m=[39m object_identity[38;5;241m.[39mObjectIdentitySet( [0;32m 2579[0m graph_function[38;5;241m.[39mgraph[38;5;241m.[39minternal_captures) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\eager\function.py:2760[0m, in [0;36mFunction._maybe_define_function[1;34m(self, args, kwargs)[0m [0;32m 2758[0m [38;5;66;03m# Only get placeholders for arguments, not captures[39;00m [0;32m 2759[0m args, kwargs [38;5;241m=[39m placeholder_dict[[38;5;124m"[39m[38;5;124margs[39m[38;5;124m"[39m] [1;32m-> 2760[0m graph_function [38;5;241m=[39m [38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_create_graph_function[49m[43m([49m[43margs[49m[43m,[49m[43m [49m[43mkwargs[49m[43m)[49m [0;32m 2762[0m graph_capture_container [38;5;241m=[39m graph_function[38;5;241m.[39mgraph[38;5;241m.[39m_capture_func_lib [38;5;66;03m# pylint: 
disable=protected-access[39;00m [0;32m 2763[0m [38;5;66;03m# Maintain the list of all captures[39;00m File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\eager\function.py:2670[0m, in [0;36mFunction._create_graph_function[1;34m(self, args, kwargs)[0m [0;32m 2665[0m missing_arg_names [38;5;241m=[39m [ [0;32m 2666[0m [38;5;124m"[39m[38;5;132;01m%s[39;00m[38;5;124m_[39m[38;5;132;01m%d[39;00m[38;5;124m"[39m [38;5;241m%[39m (arg, i) [38;5;28;01mfor[39;00m i, arg [38;5;129;01min[39;00m [38;5;28menumerate[39m(missing_arg_names) [0;32m 2667[0m ] [0;32m 2668[0m arg_names [38;5;241m=[39m base_arg_names [38;5;241m+[39m missing_arg_names [0;32m 2669[0m graph_function [38;5;241m=[39m ConcreteFunction( [1;32m-> 2670[0m [43mfunc_graph_module[49m[38;5;241;43m.[39;49m[43mfunc_graph_from_py_func[49m[43m([49m [0;32m 2671[0m [43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_name[49m[43m,[49m [0;32m 2672[0m [43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_python_function[49m[43m,[49m [0;32m 2673[0m [43m [49m[43margs[49m[43m,[49m [0;32m 2674[0m [43m [49m[43mkwargs[49m[43m,[49m [0;32m 2675[0m [43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43minput_signature[49m[43m,[49m [0;32m 2676[0m [43m [49m[43mautograph[49m[38;5;241;43m=[39;49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_autograph[49m[43m,[49m [0;32m 2677[0m [43m [49m[43mautograph_options[49m[38;5;241;43m=[39;49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_autograph_options[49m[43m,[49m [0;32m 2678[0m [43m [49m[43marg_names[49m[38;5;241;43m=[39;49m[43marg_names[49m[43m,[49m [0;32m 2679[0m [43m [49m[43mcapture_by_value[49m[38;5;241;43m=[39;49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_capture_by_value[49m[43m)[49m, [0;32m 2680[0m [38;5;28mself[39m[38;5;241m.[39m_function_attributes, [0;32m 2681[0m spec[38;5;241m=[39m[38;5;28mself[39m[38;5;241m.[39mfunction_spec, [0;32m 2682[0m [38;5;66;03m# Tell the ConcreteFunction to clean up its graph once it goes out of[39;00m 
[0;32m 2683[0m [38;5;66;03m# scope. This is not the default behavior since it gets used in some[39;00m [0;32m 2684[0m [38;5;66;03m# places (like Keras) where the FuncGraph lives longer than the[39;00m [0;32m 2685[0m [38;5;66;03m# ConcreteFunction.[39;00m [0;32m 2686[0m shared_func_graph[38;5;241m=[39m[38;5;28;01mFalse[39;00m) [0;32m 2687[0m [38;5;28;01mreturn[39;00m graph_function File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\func_graph.py:1251[0m, in [0;36mfunc_graph_from_py_func[1;34m(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, acd_record_initial_resource_uses)[0m [0;32m 1247[0m func_outputs [38;5;241m=[39m python_func([38;5;241m*[39mfunc_args, [38;5;241m*[39m[38;5;241m*[39mfunc_kwargs) [0;32m 1249[0m [38;5;66;03m# invariant: `func_outputs` contains only Tensors, CompositeTensors,[39;00m [0;32m 1250[0m [38;5;66;03m# TensorArrays and `None`s.[39;00m [1;32m-> 1251[0m func_outputs [38;5;241m=[39m [43mnest[49m[38;5;241;43m.[39;49m[43mmap_structure[49m[43m([49m [0;32m 1252[0m [43m [49m[43mconvert[49m[43m,[49m[43m [49m[43mfunc_outputs[49m[43m,[49m[43m [49m[43mexpand_composites[49m[38;5;241;43m=[39;49m[38;5;28;43;01mTrue[39;49;00m[43m)[49m [0;32m 1254[0m check_func_mutation(func_args_before, func_kwargs_before, func_args, [0;32m 1255[0m func_kwargs, original_func) [0;32m 1256[0m [38;5;28;01mfinally[39;00m: File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\util\nest.py:917[0m, in [0;36mmap_structure[1;34m(func, *structure, **kwargs)[0m [0;32m 913[0m flat_structure [38;5;241m=[39m (flatten(s, expand_composites) [38;5;28;01mfor[39;00m s [38;5;129;01min[39;00m structure) [0;32m 914[0m entries [38;5;241m=[39m [38;5;28mzip[39m([38;5;241m*[39mflat_structure) [0;32m 916[0m [38;5;28;01mreturn[39;00m pack_sequence_as( [1;32m--> 917[0m structure[[38;5;241m0[39m], 
[func([38;5;241m*[39mx) [38;5;28;01mfor[39;00m x [38;5;129;01min[39;00m entries], [0;32m 918[0m expand_composites[38;5;241m=[39mexpand_composites) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\util\nest.py:917[0m, in [0;36m<listcomp>[1;34m(.0)[0m [0;32m 913[0m flat_structure [38;5;241m=[39m (flatten(s, expand_composites) [38;5;28;01mfor[39;00m s [38;5;129;01min[39;00m structure) [0;32m 914[0m entries [38;5;241m=[39m [38;5;28mzip[39m([38;5;241m*[39mflat_structure) [0;32m 916[0m [38;5;28;01mreturn[39;00m pack_sequence_as( [1;32m--> 917[0m structure[[38;5;241m0[39m], [[43mfunc[49m[43m([49m[38;5;241;43m*[39;49m[43mx[49m[43m)[49m [38;5;28;01mfor[39;00m x [38;5;129;01min[39;00m entries], [0;32m 918[0m expand_composites[38;5;241m=[39mexpand_composites) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\func_graph.py:1210[0m, in [0;36mfunc_graph_from_py_func.<locals>.convert[1;34m(x)[0m [0;32m 1203[0m [38;5;28;01mraise[39;00m [38;5;167;01mTypeError[39;00m( [0;32m 1204[0m [38;5;124m"[39m[38;5;124mTo be compatible with tf.function, Python functions [39m[38;5;124m"[39m [0;32m 1205[0m [38;5;124m"[39m[38;5;124mmust return zero or more Tensors or ExtensionTypes or None [39m[38;5;124m"[39m [0;32m 1206[0m [38;5;124mf[39m[38;5;124m"[39m[38;5;124mvalues; in compilation of [39m[38;5;132;01m{[39;00m[38;5;28mstr[39m(python_func)[38;5;132;01m}[39;00m[38;5;124m, found return [39m[38;5;124m"[39m [0;32m 1207[0m [38;5;124mf[39m[38;5;124m"[39m[38;5;124mvalue of type [39m[38;5;132;01m{[39;00m[38;5;28mtype[39m(x)[38;5;241m.[39m[38;5;18m__name__[39m[38;5;132;01m}[39;00m[38;5;124m, which is not a Tensor or [39m[38;5;124m"[39m [0;32m 1208[0m [38;5;124m"[39m[38;5;124mExtensionType.[39m[38;5;124m"[39m) [0;32m 1209[0m [38;5;28;01mif[39;00m add_control_dependencies: [1;32m-> 1210[0m x [38;5;241m=[39m [43mdeps_ctx[49m[38;5;241;43m.[39;49m[43mmark_as_return[49m[43m([49m[43mx[49m[43m)[49m [0;32m 1211[0m 
[38;5;28;01mreturn[39;00m x File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\auto_control_deps.py:249[0m, in [0;36mAutomaticControlDependencies.mark_as_return[1;34m(self, tensor)[0m [0;32m 244[0m [38;5;28;01mreturn[39;00m tensor_array_ops[38;5;241m.[39mbuild_ta_with_new_flow(tensor, flow) [0;32m 245[0m [38;5;66;03m# We want to make the return values depend on the stateful operations, but[39;00m [0;32m 246[0m [38;5;66;03m# we don't want to introduce a cycle, so we make the return value the result[39;00m [0;32m 247[0m [38;5;66;03m# of a new identity operation that the stateful operations definitely don't[39;00m [0;32m 248[0m [38;5;66;03m# depend on.[39;00m [1;32m--> 249[0m tensor [38;5;241m=[39m [43marray_ops[49m[38;5;241;43m.[39;49m[43midentity[49m[43m([49m[43mtensor[49m[43m)[49m [0;32m 250[0m [38;5;28mself[39m[38;5;241m.[39m_returned_tensors[38;5;241m.[39madd(tensor) [0;32m 251[0m [38;5;28;01mreturn[39;00m tensor File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\util\traceback_utils.py:150[0m, in [0;36mfilter_traceback.<locals>.error_handler[1;34m(*args, **kwargs)[0m [0;32m 148[0m filtered_tb [38;5;241m=[39m [38;5;28;01mNone[39;00m [0;32m 149[0m [38;5;28;01mtry[39;00m: [1;32m--> 150[0m [38;5;28;01mreturn[39;00m fn([38;5;241m*[39margs, [38;5;241m*[39m[38;5;241m*[39mkwargs) [0;32m 151[0m [38;5;28;01mexcept[39;00m [38;5;167;01mException[39;00m [38;5;28;01mas[39;00m e: [0;32m 152[0m filtered_tb [38;5;241m=[39m _process_traceback_frames(e[38;5;241m.[39m__traceback__) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\util\dispatch.py:1176[0m, in [0;36madd_dispatch_support.<locals>.decorator.<locals>.op_dispatch_handler[1;34m(*args, **kwargs)[0m [0;32m 1174[0m [38;5;66;03m# Fallback dispatch system (dispatch v1):[39;00m [0;32m 1175[0m [38;5;28;01mtry[39;00m: [1;32m-> 1176[0m [38;5;28;01mreturn[39;00m dispatch_target([38;5;241m*[39margs, 
[38;5;241m*[39m[38;5;241m*[39mkwargs) [0;32m 1177[0m [38;5;28;01mexcept[39;00m ([38;5;167;01mTypeError[39;00m, [38;5;167;01mValueError[39;00m): [0;32m 1178[0m [38;5;66;03m# Note: convert_to_eager_tensor currently raises a ValueError, not a[39;00m [0;32m 1179[0m [38;5;66;03m# TypeError, when given unexpected types. So we need to catch both.[39;00m [0;32m 1180[0m result [38;5;241m=[39m dispatch(op_dispatch_handler, args, kwargs) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\ops\array_ops.py:294[0m, in [0;36midentity[1;34m(input, name)[0m [0;32m 290[0m [38;5;28;01mif[39;00m context[38;5;241m.[39mexecuting_eagerly() [38;5;129;01mand[39;00m [38;5;129;01mnot[39;00m [38;5;28mhasattr[39m([38;5;28minput[39m, [38;5;124m"[39m[38;5;124mgraph[39m[38;5;124m"[39m): [0;32m 291[0m [38;5;66;03m# Make sure we get an input with handle data attached from resource[39;00m [0;32m 292[0m [38;5;66;03m# variables. Variables have correct handle data when graph building.[39;00m [0;32m 293[0m [38;5;28minput[39m [38;5;241m=[39m ops[38;5;241m.[39mconvert_to_tensor([38;5;28minput[39m) [1;32m--> 294[0m ret [38;5;241m=[39m [43mgen_array_ops[49m[38;5;241;43m.[39;49m[43midentity[49m[43m([49m[38;5;28;43minput[39;49m[43m,[49m[43m [49m[43mname[49m[38;5;241;43m=[39;49m[43mname[49m[43m)[49m [0;32m 295[0m [38;5;66;03m# Propagate handle data for happier shape inference for resource variables.[39;00m [0;32m 296[0m [38;5;28;01mif[39;00m [38;5;28mhasattr[39m([38;5;28minput[39m, [38;5;124m"[39m[38;5;124m_handle_data[39m[38;5;124m"[39m): File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\ops\gen_array_ops.py:4081[0m, in [0;36midentity[1;34m(input, name)[0m [0;32m 4079[0m [38;5;28;01mpass[39;00m [38;5;66;03m# Add nodes to the TensorFlow graph.[39;00m [0;32m 4080[0m [38;5;66;03m# Add nodes to the TensorFlow graph.[39;00m [1;32m-> 4081[0m _, _, _op, _outputs [38;5;241m=[39m 
[43m_op_def_library[49m[38;5;241;43m.[39;49m[43m_apply_op_helper[49m[43m([49m [0;32m 4082[0m [43m [49m[38;5;124;43m"[39;49m[38;5;124;43mIdentity[39;49m[38;5;124;43m"[39;49m[43m,[49m[43m [49m[38;5;28;43minput[39;49m[38;5;241;43m=[39;49m[38;5;28;43minput[39;49m[43m,[49m[43m [49m[43mname[49m[38;5;241;43m=[39;49m[43mname[49m[43m)[49m [0;32m 4083[0m _result [38;5;241m=[39m _outputs[:] [0;32m 4084[0m [38;5;28;01mif[39;00m _execute[38;5;241m.[39mmust_record_gradient(): File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\op_def_library.py:797[0m, in [0;36m_apply_op_helper[1;34m(op_type_name, name, **keywords)[0m [0;32m 792[0m must_colocate_inputs [38;5;241m=[39m [val [38;5;28;01mfor[39;00m arg, val [38;5;129;01min[39;00m [38;5;28mzip[39m(op_def[38;5;241m.[39minput_arg, inputs) [0;32m 793[0m [38;5;28;01mif[39;00m arg[38;5;241m.[39mis_ref] [0;32m 794[0m [38;5;28;01mwith[39;00m _MaybeColocateWith(must_colocate_inputs): [0;32m 795[0m [38;5;66;03m# Add Op to graph[39;00m [0;32m 796[0m [38;5;66;03m# pylint: disable=protected-access[39;00m [1;32m--> 797[0m op [38;5;241m=[39m [43mg[49m[38;5;241;43m.[39;49m[43m_create_op_internal[49m[43m([49m[43mop_type_name[49m[43m,[49m[43m [49m[43minputs[49m[43m,[49m[43m [49m[43mdtypes[49m[38;5;241;43m=[39;49m[38;5;28;43;01mNone[39;49;00m[43m,[49m [0;32m 798[0m [43m [49m[43mname[49m[38;5;241;43m=[39;49m[43mscope[49m[43m,[49m[43m [49m[43minput_types[49m[38;5;241;43m=[39;49m[43minput_types[49m[43m,[49m [0;32m 799[0m [43m [49m[43mattrs[49m[38;5;241;43m=[39;49m[43mattr_protos[49m[43m,[49m[43m [49m[43mop_def[49m[38;5;241;43m=[39;49m[43mop_def[49m[43m)[49m [0;32m 801[0m [38;5;66;03m# `outputs` is returned as a separate return value so that the output[39;00m [0;32m 802[0m [38;5;66;03m# tensors can the `op` per se can be decoupled so that the[39;00m [0;32m 803[0m [38;5;66;03m# `op_callbacks` can function properly. 
See framework/op_callbacks.py[39;00m [0;32m 804[0m [38;5;66;03m# for more details.[39;00m [0;32m 805[0m outputs [38;5;241m=[39m op[38;5;241m.[39moutputs File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\func_graph.py:735[0m, in [0;36mFuncGraph._create_op_internal[1;34m(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_device)[0m [0;32m 733[0m inp [38;5;241m=[39m [38;5;28mself[39m[38;5;241m.[39mcapture(inp) [0;32m 734[0m captured_inputs[38;5;241m.[39mappend(inp) [1;32m--> 735[0m [38;5;28;01mreturn[39;00m [38;5;28;43msuper[39;49m[43m([49m[43mFuncGraph[49m[43m,[49m[43m [49m[38;5;28;43mself[39;49m[43m)[49m[38;5;241;43m.[39;49m[43m_create_op_internal[49m[43m([49m[43m [49m[38;5;66;43;03m# pylint: disable=protected-access[39;49;00m [0;32m 736[0m [43m [49m[43mop_type[49m[43m,[49m[43m [49m[43mcaptured_inputs[49m[43m,[49m[43m [49m[43mdtypes[49m[43m,[49m[43m [49m[43minput_types[49m[43m,[49m[43m [49m[43mname[49m[43m,[49m[43m [49m[43mattrs[49m[43m,[49m[43m [49m[43mop_def[49m[43m,[49m [0;32m 737[0m [43m [49m[43mcompute_device[49m[43m)[49m File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\ops.py:3800[0m, in [0;36mGraph._create_op_internal[1;34m(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_device)[0m [0;32m 3797[0m [38;5;66;03m# _create_op_helper mutates the new Operation. 
`_mutation_lock` ensures a[39;00m [0;32m 3798[0m [38;5;66;03m# Session.run call cannot occur between creating and mutating the op.[39;00m [0;32m 3799[0m [38;5;28;01mwith[39;00m [38;5;28mself[39m[38;5;241m.[39m_mutation_lock(): [1;32m-> 3800[0m ret [38;5;241m=[39m [43mOperation[49m[43m([49m [0;32m 3801[0m [43m [49m[43mnode_def[49m[43m,[49m [0;32m 3802[0m [43m [49m[38;5;28;43mself[39;49m[43m,[49m [0;32m 3803[0m [43m [49m[43minputs[49m[38;5;241;43m=[39;49m[43minputs[49m[43m,[49m [0;32m 3804[0m [43m [49m[43moutput_types[49m[38;5;241;43m=[39;49m[43mdtypes[49m[43m,[49m [0;32m 3805[0m [43m [49m[43mcontrol_inputs[49m[38;5;241;43m=[39;49m[43mcontrol_inputs[49m[43m,[49m [0;32m 3806[0m [43m [49m[43minput_types[49m[38;5;241;43m=[39;49m[43minput_types[49m[43m,[49m [0;32m 3807[0m [43m [49m[43moriginal_op[49m[38;5;241;43m=[39;49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43m_default_original_op[49m[43m,[49m [0;32m 3808[0m [43m [49m[43mop_def[49m[38;5;241;43m=[39;49m[43mop_def[49m[43m)[49m [0;32m 3809[0m [38;5;28mself[39m[38;5;241m.[39m_create_op_helper(ret, compute_device[38;5;241m=[39mcompute_device) [0;32m 3810[0m [38;5;28;01mreturn[39;00m ret File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\ops.py:2108[0m, in [0;36mOperation.__init__[1;34m(***failed resolving arguments***)[0m [0;32m 2105[0m control_input_ops[38;5;241m.[39mappend(control_op) [0;32m 2107[0m [38;5;66;03m# Initialize c_op from node_def and other inputs[39;00m [1;32m-> 2108[0m c_op [38;5;241m=[39m [43m_create_c_op[49m[43m([49m[43mg[49m[43m,[49m[43m [49m[43mnode_def[49m[43m,[49m[43m [49m[43minputs[49m[43m,[49m[43m [49m[43mcontrol_input_ops[49m[43m,[49m[43m [49m[43mop_def[49m[38;5;241;43m=[39;49m[43mop_def[49m[43m)[49m [0;32m 2109[0m [38;5;28mself[39m[38;5;241m.[39m_init_from_c_op(c_op[38;5;241m=[39mc_op, g[38;5;241m=[39mg) [0;32m 2111[0m [38;5;28mself[39m[38;5;241m.[39m_original_op [38;5;241m=[39m original_op File 
[1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\util\traceback_utils.py:150[0m, in [0;36mfilter_traceback.<locals>.error_handler[1;34m(*args, **kwargs)[0m [0;32m 148[0m filtered_tb [38;5;241m=[39m [38;5;28;01mNone[39;00m [0;32m 149[0m [38;5;28;01mtry[39;00m: [1;32m--> 150[0m [38;5;28;01mreturn[39;00m fn([38;5;241m*[39margs, [38;5;241m*[39m[38;5;241m*[39mkwargs) [0;32m 151[0m [38;5;28;01mexcept[39;00m [38;5;167;01mException[39;00m [38;5;28;01mas[39;00m e: [0;32m 152[0m filtered_tb [38;5;241m=[39m _process_traceback_frames(e[38;5;241m.[39m__traceback__) File [1;32m~\PycharmProjects\pythonProject\venv\lib\site-packages\tensorflow\python\framework\ops.py:1966[0m, in [0;36m_create_c_op[1;34m(graph, node_def, inputs, control_inputs, op_def, extract_traceback)[0m [0;32m 1962[0m pywrap_tf_session[38;5;241m.[39mTF_SetAttrValueProto(op_desc, compat[38;5;241m.[39mas_str(name), [0;32m 1963[0m serialized) [0;32m 1965[0m [38;5;28;01mtry[39;00m: [1;32m-> 1966[0m c_op [38;5;241m=[39m [43mpywrap_tf_session[49m[38;5;241;43m.[39;49m[43mTF_FinishOperation[49m[43m([49m[43mop_desc[49m[43m)[49m [0;32m 1967[0m [38;5;28;01mexcept[39;00m errors[38;5;241m.[39mInvalidArgumentError [38;5;28;01mas[39;00m e: [0;32m 1968[0m [38;5;66;03m# Convert to ValueError for backwards compatibility.[39;00m [0;32m 1969[0m [38;5;28;01mraise[39;00m [38;5;167;01mValueError[39;00m(e[38;5;241m.[39mmessage) [1;31mKeyboardInterrupt[0m:
Patrząc na czas wykonywania poszczególnych kroków — uczenie modelu trwało kilka godzin, a obliczanie metryki BLEU dla wszystkich elementów przekroczyło 8 godzin bez rezultatów — porzuciłem dalsze próby i przeszedłem na rozwiązanie w PyTorch, które wykonało się diametralnie szybciej (oba uruchomione na CUDA).