
TensorFlow Model Only Predicts 2 Classes out of 475

2024.06.01 14:25 Jonasbru3m TensorFlow Model Only Predicts 2 Classes out of 475

Hello Reddit Community,
For my Bachelor's thesis I'm currently trying to train my first ever model with TensorFlow, but I'm running into a strange issue: the model only ever predicts 2 of the 475 possible classes. The model was trained on an HPC with 304 NVIDIA A100 and 352 NVIDIA A40 GPUs across 82 nodes.
This is my training script:
import os
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications import EfficientNetB7
from tensorflow.keras import layers, models
from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard
import tensorflow_addons as tfa
import logging
import json

# Setup logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Check if GPUs are available
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:
    try:
        for gpu in gpus:
            tf.config.experimental.set_memory_growth(gpu, True)
        tf.config.set_visible_devices(gpus, 'GPU')
        logging.info(f"Using {len(gpus)} GPUs.")
    except RuntimeError as e:
        logging.error(e)
else:
    logging.error("No GPUs found. Check your device configuration.")

# Data directory
data_dir = "/app/FOOD475/"

# Image dimensions and batch size
img_height, img_width = 600, 600
batch_size = 64

# Data preprocessing and augmentation
train_datagen = ImageDataGenerator(
    rescale=1./255,
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',
    validation_split=0.25
)

# Load and preprocess images
train_generator = train_datagen.flow_from_directory(
    data_dir,
    target_size=(img_height, img_width),
    batch_size=batch_size,
    class_mode='categorical',
    subset='training'
)

validation_generator = train_datagen.flow_from_directory(
    data_dir,
    target_size=(img_height, img_width),
    batch_size=batch_size,
    class_mode='categorical',
    subset='validation'
)

# Model creation function
def create_model(input_shape, num_classes):
    base_model = EfficientNetB7(include_top=False, input_shape=input_shape, weights='imagenet')
    base_model.trainable = True
    inputs = layers.Input(shape=input_shape)
    x = base_model(inputs, training=True)
    x = layers.GlobalAveragePooling2D()(x)
    outputs = layers.Dense(num_classes, activation='softmax')(x)
    model = models.Model(inputs, outputs)
    return model

def find_latest_saved_model(checkpoint_dir):
    logging.info(f"Looking in checkpoint directory: {checkpoint_dir}")
    if not os.path.exists(checkpoint_dir):
        logging.error(f"Checkpoint directory does not exist: {checkpoint_dir}")
        return None, 0
    subdirs = [os.path.join(checkpoint_dir, d) for d in os.listdir(checkpoint_dir)
               if os.path.isdir(os.path.join(checkpoint_dir, d))]
    if not subdirs:
        logging.info("No subdirectories found for checkpoints.")
        return None, 0
    latest_subdir = max(subdirs, key=lambda x: int(os.path.basename(x)))
    latest_epoch = int(os.path.basename(latest_subdir))
    logging.info(f"Latest model directory: {latest_subdir}, Epoch: {latest_epoch}")
    if os.path.exists(os.path.join(latest_subdir, 'saved_model.pb')):
        return latest_subdir, latest_epoch
    else:
        logging.info("No saved_model.pb found in the latest directory.")
        return None, 0

# Mirrored strategy for multi-GPU training
strategy = tf.distribute.MirroredStrategy()
with strategy.scope():
    saved_model_dir = 'model_training'
    checkpoint_dir = os.path.join(saved_model_dir, 'checkpoints')
    latest_saved_model, latest_epoch = find_latest_saved_model(checkpoint_dir)
    if latest_saved_model:
        logging.info(f"Loading model from {latest_saved_model}")
        model = tf.keras.models.load_model(latest_saved_model)
    else:
        logging.info("No saved model found. Creating a new model.")
        model = create_model((img_height, img_width, 3), len(train_generator.class_indices))

    if not os.path.exists(saved_model_dir):
        os.makedirs(saved_model_dir)

    summary_path = os.path.join(saved_model_dir, 'model_summary.txt')
    with open(summary_path, 'w') as f:
        model.summary(print_fn=lambda x: f.write(x + '\n'))
    logging.info(f"Model summary saved to {summary_path}")

    optimizer = tf.keras.optimizers.Adam(learning_rate=0.0002)
    model.compile(
        optimizer=optimizer,
        loss='categorical_crossentropy',
        metrics=[
            'accuracy',
            tf.keras.metrics.TopKCategoricalAccuracy(k=5),
            tfa.metrics.F1Score(num_classes=len(train_generator.class_indices), average='macro')
        ]
    )

# Custom Callback for Saving the Best Model in SavedModel format
class SaveBestModelTF(tf.keras.callbacks.Callback):
    def __init__(self, monitor='val_accuracy', saved_model_dir='model_training'):
        super(SaveBestModelTF, self).__init__()
        self.monitor = monitor
        self.saved_model_dir = saved_model_dir

    def on_epoch_end(self, epoch, logs=None):
        current = logs.get(self.monitor)
        if current is None:
            logging.warning(f"Monitor '{self.monitor}' for saving the model is not available in logs.")
            return
        logging.info(f"Epoch {epoch + 1}: saving model to {self.saved_model_dir}/checkpoints/{epoch + 1}")
        epoch_path = os.path.join(self.saved_model_dir, 'checkpoints', str(epoch + 1))
        if not os.path.exists(epoch_path):
            os.makedirs(epoch_path)
        self.model.save(epoch_path, save_format='tf')

# Callbacks for monitoring progress
tensorboard_cb = TensorBoard(log_dir='./logs')

# Save class indices to a JSON file
class_indices_path = 'model_training/class_indices.json'
if not os.path.exists(os.path.dirname(class_indices_path)):
    os.makedirs(os.path.dirname(class_indices_path), exist_ok=True)
    logging.info(f"Directory {os.path.dirname(class_indices_path)} created.")
with open(class_indices_path, 'w') as file:
    json.dump(train_generator.class_indices, file)
logging.info(f"Class indices saved to {class_indices_path}")

# Model training
total_epochs = 7
model.fit(
    train_generator,
    initial_epoch=latest_epoch,  # Start from the next epoch
    epochs=total_epochs,
    validation_data=validation_generator,
    callbacks=[SaveBestModelTF(saved_model_dir=saved_model_dir), tensorboard_cb]
)

# Evaluate the model
eval_result = model.evaluate(validation_generator)
logging.info(f'Validation Loss: {eval_result[0]}, Validation Accuracy: {eval_result[1]}')

# Save the final model as a SavedModel format (including .pb files)
model.save('model_training/finished_model')
logging.info("Finished model saved in SavedModel format at 'model_training/finished_model'")

# Convert to TensorFlow Lite
converter = tf.lite.TFLiteConverter.from_saved_model('model_training/finished_model')
tflite_model = converter.convert()
tflite_path = 'model_training/lite_model/trained_model_lite.tflite'
if not os.path.exists(os.path.dirname(tflite_path)):
    os.makedirs(os.path.dirname(tflite_path), exist_ok=True)
    logging.info(f"Directory {os.path.dirname(tflite_path)} created.")
with open(tflite_path, 'wb') as f:
    f.write(tflite_model)
logging.info(f"Model converted and saved as {tflite_path}")
During training I got the following output:
Found 182235 images belonging to 475 classes.
Found 60544 images belonging to 475 classes.
Epoch 1/7
2848/2848 [==============================] - 11914s 4s/step - loss: 1.7624 - accuracy: 0.5931 - top_k_categorical_accuracy: 0.8152 - f1_score: 0.4739 - val_loss: 1.1666 - val_accuracy: 0.7043 - val_top_k_categorical_accuracy: 0.9013 - val_f1_score: 0.6053
Epoch 2/7
2848/2848 [==============================] - 11096s 4s/step - loss: 0.8293 - accuracy: 0.7788 - top_k_categorical_accuracy: 0.9435 - f1_score: 0.7094 - val_loss: 0.9409 - val_accuracy: 0.7533 - val_top_k_categorical_accuracy: 0.9277 - val_f1_score: 0.6818
Epoch 3/7
2848/2848 [==============================] - 11123s 4s/step - loss: 0.6247 - accuracy: 0.8274 - top_k_categorical_accuracy: 0.9632 - f1_score: 0.7760 - val_loss: 0.8422 - val_accuracy: 0.7761 - val_top_k_categorical_accuracy: 0.9386 - val_f1_score: 0.7080
Epoch 4/7
2848/2848 [==============================] - 11101s 4s/step - loss: 0.5070 - accuracy: 0.8562 - top_k_categorical_accuracy: 0.9743 - f1_score: 0.8165 - val_loss: 0.8002 - val_accuracy: 0.7885 - val_top_k_categorical_accuracy: 0.9428 - val_f1_score: 0.7249
Epoch 5/7
2848/2848 [==============================] - 11079s 4s/step - loss: 0.4261 - accuracy: 0.8766 - top_k_categorical_accuracy: 0.9814 - f1_score: 0.8445 - val_loss: 0.7757 - val_accuracy: 0.7940 - val_top_k_categorical_accuracy: 0.9458 - val_f1_score: 0.7404
Epoch 6/7
2848/2848 [==============================] - 11100s 4s/step - loss: 0.3641 - accuracy: 0.8932 - top_k_categorical_accuracy: 0.9856 - f1_score: 0.8657 - val_loss: 0.7639 - val_accuracy: 0.8003 - val_top_k_categorical_accuracy: 0.9472 - val_f1_score: 0.7432
Epoch 7/7
2848/2848 [==============================] - 11129s 4s/step - loss: 0.3142 - accuracy: 0.9068 - top_k_categorical_accuracy: 0.9889 - f1_score: 0.8838 - val_loss: 0.7701 - val_accuracy: 0.8014 - val_top_k_categorical_accuracy: 0.9470 - val_f1_score: 0.7474
946/946 [==============================] - 2671s 3s/step - loss: 0.7682 - accuracy: 0.8008 - top_k_categorical_accuracy: 0.9470 - f1_score: 0.7456
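To rule out a save/load problem, the next thing I plan to try is reloading the finished SavedModel and evaluating it on the same validation generator. A rough sketch (it assumes it runs in the same session as the training script above, so validation_generator is still defined, and that 'F1Score' resolves to the tensorflow_addons metric used at training time):

import tensorflow as tf
import tensorflow_addons as tfa

# Reload the SavedModel the training script wrote and re-evaluate it on the
# unchanged validation generator; if saving/loading is fine, the numbers should
# be close to the ~0.80 val_accuracy from the log above.
reloaded = tf.keras.models.load_model(
    'model_training/finished_model',
    custom_objects={'F1Score': tfa.metrics.F1Score},
)
results = reloaded.evaluate(validation_generator)
print(dict(zip(reloaded.metrics_names, results)))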
And when I try to load the model and make a prediction with this code:
# Note: imports added here for completeness; I'm assuming F1Score is the
# tensorflow_addons metric used in the training script.
import os
import json

import numpy as np
import tensorflow as tf
from tensorflow_addons.metrics import F1Score


class own:
    def __init__(self):
        if not os.path.exists("models/own"):
            raise FileNotFoundError(f"Model path models/own does not exist")
        try:
            self.model = tf.keras.models.load_model("models/own", custom_objects={'F1Score': F1Score})
        except Exception as e:
            print(f"Error loading model: {e}")
            raise
        if not os.path.exists("models/own/class_indices.json"):
            raise FileNotFoundError(f"Class indices path models/own/class_indices.json does not exist")
        with open("models/own/class_indices.json", 'r') as file:
            self.class_indices = json.load(file)
        self.index_to_class = {v: k for k, v in self.class_indices.items()}

    def classify(self, img_path):
        if not os.path.exists(img_path):
            raise FileNotFoundError(f"Image path {img_path} does not exist")
        # Load and preprocess the image
        img = tf.keras.preprocessing.image.load_img(img_path, target_size=(600, 600))
        img_array = tf.keras.preprocessing.image.img_to_array(img)
        img_array = np.expand_dims(img_array, axis=0)
        img_array /= 255.0
        # Make prediction
        predictions = self.model.predict(img_array)
        print("Raw predictions:", predictions)
        top_index = np.argmax(predictions[0])
        top_class = self.index_to_class[top_index]
        print(f"Top class: {top_class}, Probability: {predictions[0][top_index]}")
        top_n = 5
        top_indices = np.argsort(predictions[0])[-top_n:][::-1]
        for idx in top_indices:
            print(f"Class: {self.index_to_class[idx]}, Probability: {predictions[0][idx]}")
        return top_class
it always predicts either Steak or Omelette:
2024-06-01 14:17:27.571776: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.
WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\losses.py:2976: The name tf.losses.sparse_softmax_cross_entropy is deprecated. Please use tf.compat.v1.losses.sparse_softmax_cross_entropy instead.
C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\tensorflow_addons\utils\tfa_eol_msg.py:23: UserWarning: TensorFlow Addons (TFA) has ended development and introduction of new features. TFA has entered a minimal maintenance and release mode until a planned end of life in May 2024. Please modify downstream libraries to take dependencies from other repositories in our TensorFlow community (e.g. Keras, Keras-CV, and Keras-NLP). For more information see:
  warnings.warn(
C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\tensorflow_addons\utils\ensure_tf_install.py:53: UserWarning: Tensorflow Addons supports using Python ops for all Tensorflow versions above or equal to 2.12.0 and strictly below 2.15.0 (nightly versions are not supported). The versions of TensorFlow you are currently using is 2.15.0 and is not supported. Some things might work, some things might not. If you were to encounter a bug, do not file an issue. If you want to make sure you're using a tested and supported configuration, either change the TensorFlow version or the TensorFlow Addons's version. You can find the compatibility matrix in TensorFlow Addon's readme:
  warnings.warn(
WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\saving\legacy\saved_model\load.py:107: The name tf.gfile.Exists is deprecated. Please use tf.io.gfile.exists instead.
2024-06-01 14:17:31.363666: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: SSE SSE2 SSE3 SSE4.1 SSE4.2 AVX2 AVX512F AVX512_VNNI AVX512_BF16 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\functional.py:156: The name tf.executing_eagerly_outside_functions is deprecated. Please use tf.compat.v1.executing_eagerly_outside_functions instead.
WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\layers\normalization\batch_normalization.py:979: The name tf.nn.fused_batch_norm is deprecated. Please use tf.compat.v1.nn.fused_batch_norm instead.
1/1 [==============================] - 4s 4s/step Raw predictions: [[4.23421043e-05 1.45377373e-06 1.09034730e-02 1.19525917e-04 4.45407240e-05 5.72818244e-05 5.68609731e-03 5.15926695e-05 1.89958355e-05 1.39491487e-04 3.20717366e-03 9.63417915e-06 1.22947793e-03 4.01171012e-04 3.64649204e-05 1.75396308e-05 3.09416023e-03 7.56465085e-03 2.89075997e-05 3.90331191e-03 2.16231216e-03 4.18351328e-06 5.89632022e-04 9.40740295e-03 6.80321036e-03 2.32697069e-03 4.23964392e-03 1.56047070e-04 2.14435873e-04 6.95710623e-05 1.38103365e-04 1.78470847e-03 3.75193194e-03 5.94434096e-03 5.69255608e-05 7.57165905e-03 1.52613886e-03 9.48755944e-04 8.21925176e-04 3.18029453e-03 3.89393512e-03 8.41296278e-05 8.34997976e-04 3.14124190e-04 6.81638776e-04 1.10320523e-02 1.10815199e-04 6.18589204e-03 2.17406079e-02 3.72037102e-05 1.65579877e-05 1.30886221e-02 1.01435784e-04 2.13157946e-05 1.25499619e-05 8.94762017e-03 4.36880719e-03 4.78018774e-03 8.53170827e-03 1.45823974e-02 1.05571962e-05 1.12631078e-05 5.09415939e-03 8.12840741e-03 1.48212257e-05 1.52864438e-02 9.66716034e-05 2.25000476e-04 3.60531732e-04 9.28066402e-06 8.15156789e-04 1.09069003e-02 3.43796797e-04 2.53324561e-05 7.89516326e-03 1.44943051e-05 4.06841224e-04 1.67445414e-05 3.78527766e-05 1.80476491e-04 3.33699776e-04 4.13847056e-06 3.32273915e-03 6.51864940e-03 7.48403618e-05 2.68448726e-04 1.54245936e-03 2.95383972e-03 2.26996126e-05 3.64100002e-03 2.81597768e-05 3.11967051e-05 1.48438021e-05 8.46863433e-04 4.05767525e-04 1.75380992e-04 4.76581818e-06 5.42160356e-04 2.19287374e-03 1.18714366e-02 1.41884899e-04 8.76697595e-06 3.85931274e-03 4.37544841e-05 4.01919424e-05 3.87528981e-03 3.88057524e-05 2.69062322e-04 4.46968805e-03 1.17368818e-05 3.70194939e-05 1.55831876e-04 1.63894765e-05 2.38729117e-04 1.19046052e-03 2.12675819e-04 1.08185853e-03 3.01667496e-05 6.18575094e-03 3.91955400e-05 1.40065713e-05 3.02084809e-04 6.46927813e-03 3.37069832e-05 5.15250103e-05 2.31142567e-05 2.20274273e-03 3.17445702e-05 1.04452763e-02 6.80019803e-05 7.81101780e-03 1.23853814e-02 1.04819983e-02 3.20679283e-05 6.71340758e-03 6.94293885e-06 1.98310101e-03 5.29599565e-05 9.02036484e-03 4.57535089e-06 1.93145883e-03 4.06190008e-03 8.42716638e-03 1.50314684e-03 8.58115556e-04 1.22383237e-03 8.49474862e-04 5.48258470e-03 6.09953167e-05 1.57669128e-03 5.43692382e-03 4.88058169e-04 6.75312986e-05 3.43937165e-04 1.93276245e-03 4.06867871e-03 5.20323374e-05 7.78318281e-05 1.93508764e-04 1.14409677e-05 2.21324177e-03 1.90052821e-03 8.52691382e-03 2.43102224e-03 2.88419239e-03 2.53974522e-05 9.51182563e-04 2.32981285e-03 9.86064842e-05 4.14316915e-03 1.66544644e-03 1.02754391e-04 3.95776224e-05 3.02393187e-06 1.32082617e-02 4.14707232e-04 3.40229672e-05 4.81802830e-03 1.90598912e-05 4.08358377e-04 5.95443300e-04 1.22634810e-04 5.74091624e-04 8.57623760e-03 2.60962266e-03 2.95263715e-03 1.58088005e-05 1.64122172e-02 2.09987498e-04 2.36775051e-03 3.00696083e-05 3.46693669e-05 1.16249910e-04 6.94001559e-03 1.58400853e-05 1.95188422e-05 2.19169408e-04 3.09433235e-04 5.44128183e-04 6.35302160e-04 7.07127433e-03 1.19772732e-04 5.37439200e-06 1.91133395e-02 1.27979312e-02 3.89739592e-03 1.97048103e-05 2.29625002e-05 2.21050854e-04 1.92064399e-04 1.20139657e-05 3.20516920e-05 4.26828819e-06 3.64828011e-05 7.55213068e-06 2.67963973e-03 3.17923805e-05 6.19895945e-05 3.99544797e-06 2.68664648e-04 1.83274597e-02 8.71072552e-05 1.38439747e-04 4.96710254e-06 3.56023484e-05 1.34899991e-03 2.05766381e-04 3.96062108e-03 5.61600551e-03 5.31910664e-05 6.77773132e-05 1.36139952e-02 
7.41477634e-05 1.63904135e-03 4.74587978e-06 1.45082246e-04 2.09337009e-06 8.13181920e-04 3.63194500e-04 6.46722084e-03 5.02364383e-05 6.90550078e-05 6.36972545e-05 2.09673337e-04 1.79036579e-05 2.36021675e-04 6.37291942e-06 5.70875318e-06 2.56235455e-03 2.72009202e-04 3.77103061e-05 5.63449021e-06 2.25979857e-05 2.61697169e-05 3.42375762e-03 1.04161156e-02 2.22223607e-05 6.27681802e-05 1.88465419e-04 2.82149922e-05 4.01149562e-04 1.31122259e-04 5.97863036e-05 2.41098423e-05 7.71318519e-05 3.57087993e-04 3.41462255e-05 1.01930054e-04 5.23206063e-06 2.95026781e-04 7.02897159e-05 3.99115682e-02 1.89455808e-03 1.74146010e-06 1.14775894e-05 7.84916210e-06 1.93041191e-03 2.37918808e-03 3.49449110e-03 6.98623667e-03 7.64393993e-03 4.12582303e-05 1.24030013e-03 1.72785169e-03 7.18316660e-05 5.17749111e-04 7.84919783e-03 1.04525541e-04 9.83856899e-06 8.77521088e-05 1.68125369e-02 4.09213862e-05 1.09552668e-04 2.54421811e-05 4.65482954e-05 6.95294410e-04 6.72869501e-05 2.40904570e-04 2.15112406e-04 3.85226776e-05 2.51369456e-05 4.68338234e-03 1.26862462e-04 9.00995801e-04 4.16984549e-05 7.36891707e-06 1.51534463e-04 1.48332631e-03 4.95935837e-03 1.91499032e-02 3.01804044e-04 6.28613270e-05 4.78365598e-03 8.38827982e-05 1.70516931e-02 1.52653758e-03 5.85798814e-04 3.11521399e-05 2.11968741e-04 7.41351105e-05 1.40834545e-05 8.93215940e-04 1.45371505e-05 4.96711982e-05 4.11317131e-04 8.89070239e-03 5.06997202e-03 3.08362325e-03 2.77415646e-04 3.75299685e-04 1.19906381e-05 1.50029315e-03 1.14443043e-04 2.52026439e-05 9.22407198e-04 3.51146841e-03 1.11564566e-06 1.36691102e-04 3.53032886e-03 2.15746608e-04 8.79282816e-05 4.36248304e-03 1.77966576e-04 1.47887832e-03 6.94399816e-04 8.03673174e-04 5.23004041e-04 3.90421192e-04 1.06344873e-03 3.55399796e-04 6.01265463e-04 1.55850008e-04 1.33491016e-03 1.09734829e-04 4.38019342e-04 2.42487862e-04 6.84730615e-03 1.02040754e-03 1.07652310e-03 3.51822848e-04 9.20735547e-05 7.50967592e-04 1.44127226e-02 3.58455327e-05 5.16555374e-05 1.31370616e-03 9.02966480e-04 1.24254671e-03 5.20300702e-04 8.57163919e-04 3.66344648e-05 2.01024144e-04 6.52487564e-04 5.93215809e-04 5.76604251e-03 6.19325438e-04 1.16480421e-03 2.37531040e-05 2.50119111e-03 7.08868974e-05 5.99786472e-05 2.55976247e-05 4.62695534e-05 4.24469297e-04 6.20667648e-04 4.15926515e-05 7.03983005e-06 8.77018738e-06 5.21141301e-05 2.11411956e-04 7.74205779e-04 5.31276630e-04 6.44316664e-04 4.07212786e-03 2.68336060e-03 1.74210854e-05 3.76385942e-05 6.74255705e-03 4.46323538e-05 2.76757801e-05 2.56290223e-04 1.22213329e-04 1.22734054e-03 7.73016480e-04 1.11903930e-02 3.16570923e-02 2.75775470e-04 5.73344238e-04 2.86890985e-03 1.10085262e-03 1.35615155e-05 2.66479654e-03 1.99418981e-03 4.31017601e-04 9.68350447e-04 3.51598108e-04 8.54862970e-04 3.52715979e-05 1.46333405e-04 5.10955288e-05 1.48639630e-03 1.80458324e-03 7.51840998e-05 1.13529910e-04 3.89828119e-06 8.74532212e-04 1.12358983e-04 3.93593837e-05 6.01037289e-04 2.06997487e-04 3.94766452e-03 1.09549124e-04 2.11403880e-04 6.95336203e-04 5.99777419e-03 5.45272342e-05 2.56420486e-03 2.20299728e-04 4.23851707e-05 6.69996080e-04 2.66609713e-04 1.55276459e-04 2.75739990e-02 3.43240798e-03 2.68303775e-05 1.52821158e-04 9.82575657e-05 4.00313947e-05 6.07266993e-05 5.28094570e-05 1.02948405e-04 6.20577412e-05 2.12161940e-05 2.99842539e-03 1.17558768e-04 1.58015324e-03 3.30074807e-04 1.19093776e-04 2.52985101e-05 1.59350988e-02 4.89539379e-05 1.05491054e-05 1.09012712e-04 2.97089737e-05 7.28885690e-03 1.87386977e-05 1.85028894e-05 5.79945299e-05 1.54079917e-05 
9.85169099e-05 1.05076749e-03 7.55816349e-04 2.62255053e-05 1.18091421e-05 2.95209320e-05]]
Top class: omelette, Probability: 0.03991156816482544
Class: omelette, Probability: 0.03991156816482544
Class: steak, Probability: 0.03165709227323532
Class: tacos, Probability: 0.027573999017477036
Class: breakfast_burrito, Probability: 0.021740607917308807
Class: pulled_pork_sandwich, Probability: 0.01914990320801735
(own): omelette - 3.66s

https://github.com/tensorflow/addons/issues/2807
https://github.com/tensorflow/addons
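To make sure it's not just a couple of unlucky test images, this is roughly how I'm counting the distinct top-1 labels over a whole folder of test images. It just reuses classify() from the class above; the val_dir path is a placeholder for whatever sample folder is available:

import os
from collections import Counter

clf = own()
val_dir = "data/validation_sample"  # placeholder: any folder with a few hundred test images
counts = Counter()
for name in os.listdir(val_dir):
    # classify() returns the top-1 class name for one image
    counts[clf.classify(os.path.join(val_dir, name))] += 1

print("Distinct predicted classes:", len(counts))
print(counts.most_common(10))

Across everything I've tried so far, that count stays at 2 (omelette and steak).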
Help would be appreciated because I'm slowly losing my mind :(
Jonas
submitted by Jonasbru3m to r/computervision


9.85169099e-05 1.05076749e-03 7.55816349e-04 2.62255053e-05 1.18091421e-05 2.95209320e-05]] Top class: omelette, Probability: 0.03991156816482544 Class: omelette, Probability: 0.03991156816482544 Class: steak, Probability: 0.03165709227323532 Class: tacos, Probability: 0.027573999017477036 Class: breakfast_burrito, Probability: 0.021740607917308807 Class: pulled_pork_sandwich, Probability: 0.01914990320801735 (own): omelette - 3.66s
Help would be appreciated because I'm slowly losing my mind :(
Jonas
submitted by Jonasbru3m to deeplearning [link] [comments]


2024.06.01 14:21 Jonasbru3m TensorFlow Model Only Predicts 2 Classes out of 475

Hello Reddit Community,
For my Bachelor Thesis I'm currently trying to train my first-ever model with TensorFlow, but I'm encountering a strange issue where my model only predicts 2 of the 475 possible classes. The model was trained on an HPC with 304 Nvidia A100 and 352 Nvidia A40 GPGPUs across 82 nodes.
That's my training script:
 import os import tensorflow as tf from tensorflow.keras.preprocessing.image import ImageDataGenerator from tensorflow.keras.applications import EfficientNetB7 from tensorflow.keras import layers, models from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard import tensorflow_addons as tfa import logging import json # Setup logging logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') # Check if GPUs are available gpus = tf.config.experimental.list_physical_devices('GPU') if gpus: try: for gpu in gpus: tf.config.experimental.set_memory_growth(gpu, True) tf.config.set_visible_devices(gpus, 'GPU') logging.info(f"Using {len(gpus)} GPUs.") except RuntimeError as e: logging.error(e) else: logging.error("No GPUs found. Check your device configuration.") # Data directory data_dir = "/app/FOOD475/" # Image dimensions and batch size img_height, img_width = 600, 600 batch_size = 64 # Data preprocessing and augmentation train_datagen = ImageDataGenerator( rescale=1./255, rotation_range=40, width_shift_range=0.2, height_shift_range=0.2, shear_range=0.2, zoom_range=0.2, horizontal_flip=True, fill_mode='nearest', validation_split=0.25 ) # Load and preprocess images train_generator = train_datagen.flow_from_directory( data_dir, target_size=(img_height, img_width), batch_size=batch_size, class_mode='categorical', subset='training' ) validation_generator = train_datagen.flow_from_directory( data_dir, target_size=(img_height, img_width), batch_size=batch_size, class_mode='categorical', subset='validation' ) # Model creation function def create_model(input_shape, num_classes): base_model = EfficientNetB7(include_top=False, input_shape=input_shape, weights='imagenet') base_model.trainable = True inputs = layers.Input(shape=input_shape) x = base_model(inputs, training=True) x = layers.GlobalAveragePooling2D()(x) outputs = layers.Dense(num_classes, activation='softmax')(x) model = models.Model(inputs, outputs) return model def find_latest_saved_model(checkpoint_dir): logging.info(f"Looking in checkpoint directory: {checkpoint_dir}") if not os.path.exists(checkpoint_dir): logging.error(f"Checkpoint directory does not exist: {checkpoint_dir}") return None, 0 subdirs = [os.path.join(checkpoint_dir, d) for d in os.listdir(checkpoint_dir) if os.path.isdir(os.path.join(checkpoint_dir, d))] if not subdirs: logging.info("No subdirectories found for checkpoints.") return None, 0 latest_subdir = max(subdirs, key=lambda x: int(os.path.basename(x))) latest_epoch = int(os.path.basename(latest_subdir)) logging.info(f"Latest model directory: {latest_subdir}, Epoch: {latest_epoch}") if os.path.exists(os.path.join(latest_subdir, 'saved_model.pb')): return latest_subdir, latest_epoch else: logging.info("No saved_model.pb found in the latest directory.") return None, 0 # Mirrored strategy for multi-GPU training strategy = tf.distribute.MirroredStrategy() with strategy.scope(): saved_model_dir = 'model_training' checkpoint_dir = os.path.join(saved_model_dir, 'checkpoints') latest_saved_model, latest_epoch = find_latest_saved_model(checkpoint_dir) if latest_saved_model: logging.info(f"Loading model from {latest_saved_model}") model = tf.keras.models.load_model(latest_saved_model) else: logging.info("No saved model found. 
Creating a new model.") model = create_model((img_height, img_width, 3), len(train_generator.class_indices)) if not os.path.exists(saved_model_dir): os.makedirs(saved_model_dir) summary_path = os.path.join(saved_model_dir, 'model_summary.txt') with open(summary_path, 'w') as f: model.summary(print_fn=lambda x: f.write(x + '\n')) logging.info(f"Model summary saved to {summary_path}") optimizer = tf.keras.optimizers.Adam(learning_rate=0.0002) model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy', tf.keras.metrics.TopKCategoricalAccuracy(k=5), tfa.metrics.F1Score(num_classes=len(train_generator.class_indices), average='macro')]) # Custom Callback for Saving the Best Model in SavedModel format class SaveBestModelTF(tf.keras.callbacks.Callback): def __init__(self, monitor='val_accuracy', saved_model_dir='model_training'): super(SaveBestModelTF, self).__init__() self.monitor = monitor self.saved_model_dir = saved_model_dir def on_epoch_end(self, epoch, logs=None): current = logs.get(self.monitor) if current is None: logging.warning(f"Monitor '{self.monitor}' for saving the model is not available in logs.") return logging.info(f"Epoch {epoch + 1}: saving model to {self.saved_model_dir}/checkpoints/{epoch + 1}") epoch_path = os.path.join(self.saved_model_dir, 'checkpoints', str(epoch + 1)) if not os.path.exists(epoch_path): os.makedirs(epoch_path) self.model.save(epoch_path, save_format='tf') # Callbacks for monitoring progress tensorboard_cb = TensorBoard(log_dir='./logs') # Save class indices to a JSON file class_indices_path = 'model_training/class_indices.json' if not os.path.exists(os.path.dirname(class_indices_path)): os.makedirs(os.path.dirname(class_indices_path), exist_ok=True) logging.info(f"Directory {os.path.dirname(class_indices_path)} created.") with open(class_indices_path, 'w') as file: json.dump(train_generator.class_indices, file) logging.info(f"Class indices saved to {class_indices_path}") # Model training total_epochs = 7 model.fit( train_generator, initial_epoch=latest_epoch, # Start from the next epoch epochs=total_epochs, validation_data=validation_generator, callbacks=[SaveBestModelTF(saved_model_dir=saved_model_dir), tensorboard_cb] ) # Evaluate the model eval_result = model.evaluate(validation_generator) logging.info(f'Validation Loss: {eval_result[0]}, Validation Accuracy: {eval_result[1]}') # Save the final model as a SavedModel format (including .pb files) model.save('model_training/finished_model') logging.info("Finished model saved in SavedModel format at 'model_training/finished_model'") # Convert to TensorFlow Lite converter = tf.lite.TFLiteConverter.from_saved_model('model_training/finished_model') tflite_model = converter.convert() tflite_path = 'model_training/lite_model/trained_model_lite.tflite' if not os.path.exists(os.path.dirname(tflite_path)): os.makedirs(os.path.dirname(tflite_path), exist_ok=True) logging.info(f"Directory {os.path.dirname(tflite_path)} created.") with open(tflite_path, 'wb') as f: f.write(tflite_model) logging.info(f"Model converted and saved as {tflite_path}") 
During training I got the following output:
Found 182235 images belonging to 475 classes. Found 60544 images belonging to 475 classes. Epoch 1/7 2848/2848 [==============================] - 11914s 4s/step - loss: 1.7624 - accuracy: 0.5931 - top_k_categorical_accuracy: 0.8152 - f1_score: 0.4739 - val_loss: 1.1666 - val_accuracy: 0.7043 - val_top_k_categorical_accuracy: 0.9013 - val_f1_score: 0.6053 Epoch 2/7 2848/2848 [==============================] - 11096s 4s/step - loss: 0.8293 - accuracy: 0.7788 - top_k_categorical_accuracy: 0.9435 - f1_score: 0.7094 - val_loss: 0.9409 - val_accuracy: 0.7533 - val_top_k_categorical_accuracy: 0.9277 - val_f1_score: 0.6818 Epoch 3/7 2848/2848 [==============================] - 11123s 4s/step - loss: 0.6247 - accuracy: 0.8274 - top_k_categorical_accuracy: 0.9632 - f1_score: 0.7760 - val_loss: 0.8422 - val_accuracy: 0.7761 - val_top_k_categorical_accuracy: 0.9386 - val_f1_score: 0.7080 Epoch 4/7 2848/2848 [==============================] - 11101s 4s/step - loss: 0.5070 - accuracy: 0.8562 - top_k_categorical_accuracy: 0.9743 - f1_score: 0.8165 - val_loss: 0.8002 - val_accuracy: 0.7885 - val_top_k_categorical_accuracy: 0.9428 - val_f1_score: 0.7249 Epoch 5/7 2848/2848 [==============================] - 11079s 4s/step - loss: 0.4261 - accuracy: 0.8766 - top_k_categorical_accuracy: 0.9814 - f1_score: 0.8445 - val_loss: 0.7757 - val_accuracy: 0.7940 - val_top_k_categorical_accuracy: 0.9458 - val_f1_score: 0.7404 Epoch 6/7 2848/2848 [==============================] - 11100s 4s/step - loss: 0.3641 - accuracy: 0.8932 - top_k_categorical_accuracy: 0.9856 - f1_score: 0.8657 - val_loss: 0.7639 - val_accuracy: 0.8003 - val_top_k_categorical_accuracy: 0.9472 - val_f1_score: 0.7432 Epoch 7/7 2848/2848 [==============================] - 11129s 4s/step - loss: 0.3142 - accuracy: 0.9068 - top_k_categorical_accuracy: 0.9889 - f1_score: 0.8838 - val_loss: 0.7701 - val_accuracy: 0.8014 - val_top_k_categorical_accuracy: 0.9470 - val_f1_score: 0.7474 946/946 [==============================] - 2671s 3s/step - loss: 0.7682 - accuracy: 0.8008 - top_k_categorical_accuracy: 0.9470 - f1_score: 0.7456 
And when I try to load the model and make a prediction with this code:
class own: def __init__(self): if not os.path.exists("models/own"): raise FileNotFoundError(f"Model path models/own does not exist") try: self.model = tf.keras.models.load_model("models/own", custom_objects={'F1Score': F1Score}) except Exception as e: print(f"Error loading model: {e}") raise if not os.path.exists("models/own/class_indices.json"): raise FileNotFoundError(f"Class indices path models/own/class_indices.json does not exist") with open("models/own/class_indices.json", 'r') as file: self.class_indices = json.load(file) self.index_to_class = {v: k for k, v in self.class_indices.items()} def classify(self, img_path): if not os.path.exists(img_path): raise FileNotFoundError(f"Image path {img_path} does not exist") # Load and preprocess the image img = tf.keras.preprocessing.image.load_img(img_path, target_size=(600, 600)) img_array = tf.keras.preprocessing.image.img_to_array(img) img_array = np.expand_dims(img_array, axis=0) img_array /= 255.0 # Make prediction predictions = self.model.predict(img_array) print("Raw predictions:", predictions) top_index = np.argmax(predictions[0]) top_class = self.index_to_class[top_index] print(f"Top class: {top_class}, Probability: {predictions[0][top_index]}") top_n = 5 top_indices = np.argsort(predictions[0])[-top_n:][::-1] for idx in top_indices: print(f"Class: {self.index_to_class[idx]}, Probability: {predictions[0][idx]}") return top_class 
it always either predicts Steak or Omelette:
2024-06-01 14:17:27.571776: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\losses.py:2976: The name tf.losses.sparse_softmax_cross_entropy is deprecated. Please use tf.compat.v1.losses.sparse_softmax_cross_entropy instead. C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\tensorflow_addons\utils\tfa_eol_msg.py:23: UserWarning: TensorFlow Addons (TFA) has ended development and introduction of new features. TFA has entered a minimal maintenance and release mode until a planned end of life in May 2024. Please modify downstream libraries to take dependencies from other repositories in our TensorFlow community (e.g. Keras, Keras-CV, and Keras-NLP). For more information see: https://github.com/tensorflow/addons/issues/2807 warnings.warn( C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\tensorflow_addons\utils\ensure_tf_install.py:53: UserWarning: Tensorflow Addons supports using Python ops for all Tensorflow versions above or equal to 2.12.0 and strictly below 2.15.0 (nightly versions are not supported). The versions of TensorFlow you are currently using is 2.15.0 and is not supported. Some things might work, some things might not. If you were to encounter a bug, do not file an issue. If you want to make sure you're using a tested and supported configuration, either change the TensorFlow version or the TensorFlow Addons's version. You can find the compatibility matrix in TensorFlow Addon's readme: https://github.com/tensorflow/addons warnings.warn( WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\saving\legacy\saved_model\load.py:107: The name tf.gfile.Exists is deprecated. Please use tf.io.gfile.exists instead. 2024-06-01 14:17:31.363666: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: SSE SSE2 SSE3 SSE4.1 SSE4.2 AVX2 AVX512F AVX512_VNNI AVX512_BF16 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\engine\functional.py:156: The name tf.executing_eagerly_outside_functions is deprecated. Please use tf.compat.v1.executing_eagerly_outside_functions instead. WARNING:tensorflow:From C:\Users\[Name]\AppData\Local\Programs\Python\Python310\lib\site-packages\keras\src\layers\normalization\batch_normalization.py:979: The name tf.nn.fused_batch_norm is deprecated. Please use tf.compat.v1.nn.fused_batch_norm instead. 
1/1 [==============================] - 4s 4s/step Raw predictions: [[4.23421043e-05 1.45377373e-06 1.09034730e-02 1.19525917e-04 4.45407240e-05 5.72818244e-05 5.68609731e-03 5.15926695e-05 1.89958355e-05 1.39491487e-04 3.20717366e-03 9.63417915e-06 1.22947793e-03 4.01171012e-04 3.64649204e-05 1.75396308e-05 3.09416023e-03 7.56465085e-03 2.89075997e-05 3.90331191e-03 2.16231216e-03 4.18351328e-06 5.89632022e-04 9.40740295e-03 6.80321036e-03 2.32697069e-03 4.23964392e-03 1.56047070e-04 2.14435873e-04 6.95710623e-05 1.38103365e-04 1.78470847e-03 3.75193194e-03 5.94434096e-03 5.69255608e-05 7.57165905e-03 1.52613886e-03 9.48755944e-04 8.21925176e-04 3.18029453e-03 3.89393512e-03 8.41296278e-05 8.34997976e-04 3.14124190e-04 6.81638776e-04 1.10320523e-02 1.10815199e-04 6.18589204e-03 2.17406079e-02 3.72037102e-05 1.65579877e-05 1.30886221e-02 1.01435784e-04 2.13157946e-05 1.25499619e-05 8.94762017e-03 4.36880719e-03 4.78018774e-03 8.53170827e-03 1.45823974e-02 1.05571962e-05 1.12631078e-05 5.09415939e-03 8.12840741e-03 1.48212257e-05 1.52864438e-02 9.66716034e-05 2.25000476e-04 3.60531732e-04 9.28066402e-06 8.15156789e-04 1.09069003e-02 3.43796797e-04 2.53324561e-05 7.89516326e-03 1.44943051e-05 4.06841224e-04 1.67445414e-05 3.78527766e-05 1.80476491e-04 3.33699776e-04 4.13847056e-06 3.32273915e-03 6.51864940e-03 7.48403618e-05 2.68448726e-04 1.54245936e-03 2.95383972e-03 2.26996126e-05 3.64100002e-03 2.81597768e-05 3.11967051e-05 1.48438021e-05 8.46863433e-04 4.05767525e-04 1.75380992e-04 4.76581818e-06 5.42160356e-04 2.19287374e-03 1.18714366e-02 1.41884899e-04 8.76697595e-06 3.85931274e-03 4.37544841e-05 4.01919424e-05 3.87528981e-03 3.88057524e-05 2.69062322e-04 4.46968805e-03 1.17368818e-05 3.70194939e-05 1.55831876e-04 1.63894765e-05 2.38729117e-04 1.19046052e-03 2.12675819e-04 1.08185853e-03 3.01667496e-05 6.18575094e-03 3.91955400e-05 1.40065713e-05 3.02084809e-04 6.46927813e-03 3.37069832e-05 5.15250103e-05 2.31142567e-05 2.20274273e-03 3.17445702e-05 1.04452763e-02 6.80019803e-05 7.81101780e-03 1.23853814e-02 1.04819983e-02 3.20679283e-05 6.71340758e-03 6.94293885e-06 1.98310101e-03 5.29599565e-05 9.02036484e-03 4.57535089e-06 1.93145883e-03 4.06190008e-03 8.42716638e-03 1.50314684e-03 8.58115556e-04 1.22383237e-03 8.49474862e-04 5.48258470e-03 6.09953167e-05 1.57669128e-03 5.43692382e-03 4.88058169e-04 6.75312986e-05 3.43937165e-04 1.93276245e-03 4.06867871e-03 5.20323374e-05 7.78318281e-05 1.93508764e-04 1.14409677e-05 2.21324177e-03 1.90052821e-03 8.52691382e-03 2.43102224e-03 2.88419239e-03 2.53974522e-05 9.51182563e-04 2.32981285e-03 9.86064842e-05 4.14316915e-03 1.66544644e-03 1.02754391e-04 3.95776224e-05 3.02393187e-06 1.32082617e-02 4.14707232e-04 3.40229672e-05 4.81802830e-03 1.90598912e-05 4.08358377e-04 5.95443300e-04 1.22634810e-04 5.74091624e-04 8.57623760e-03 2.60962266e-03 2.95263715e-03 1.58088005e-05 1.64122172e-02 2.09987498e-04 2.36775051e-03 3.00696083e-05 3.46693669e-05 1.16249910e-04 6.94001559e-03 1.58400853e-05 1.95188422e-05 2.19169408e-04 3.09433235e-04 5.44128183e-04 6.35302160e-04 7.07127433e-03 1.19772732e-04 5.37439200e-06 1.91133395e-02 1.27979312e-02 3.89739592e-03 1.97048103e-05 2.29625002e-05 2.21050854e-04 1.92064399e-04 1.20139657e-05 3.20516920e-05 4.26828819e-06 3.64828011e-05 7.55213068e-06 2.67963973e-03 3.17923805e-05 6.19895945e-05 3.99544797e-06 2.68664648e-04 1.83274597e-02 8.71072552e-05 1.38439747e-04 4.96710254e-06 3.56023484e-05 1.34899991e-03 2.05766381e-04 3.96062108e-03 5.61600551e-03 5.31910664e-05 6.77773132e-05 1.36139952e-02 
7.41477634e-05 1.63904135e-03 4.74587978e-06 1.45082246e-04 2.09337009e-06 8.13181920e-04 3.63194500e-04 6.46722084e-03 5.02364383e-05 6.90550078e-05 6.36972545e-05 2.09673337e-04 1.79036579e-05 2.36021675e-04 6.37291942e-06 5.70875318e-06 2.56235455e-03 2.72009202e-04 3.77103061e-05 5.63449021e-06 2.25979857e-05 2.61697169e-05 3.42375762e-03 1.04161156e-02 2.22223607e-05 6.27681802e-05 1.88465419e-04 2.82149922e-05 4.01149562e-04 1.31122259e-04 5.97863036e-05 2.41098423e-05 7.71318519e-05 3.57087993e-04 3.41462255e-05 1.01930054e-04 5.23206063e-06 2.95026781e-04 7.02897159e-05 3.99115682e-02 1.89455808e-03 1.74146010e-06 1.14775894e-05 7.84916210e-06 1.93041191e-03 2.37918808e-03 3.49449110e-03 6.98623667e-03 7.64393993e-03 4.12582303e-05 1.24030013e-03 1.72785169e-03 7.18316660e-05 5.17749111e-04 7.84919783e-03 1.04525541e-04 9.83856899e-06 8.77521088e-05 1.68125369e-02 4.09213862e-05 1.09552668e-04 2.54421811e-05 4.65482954e-05 6.95294410e-04 6.72869501e-05 2.40904570e-04 2.15112406e-04 3.85226776e-05 2.51369456e-05 4.68338234e-03 1.26862462e-04 9.00995801e-04 4.16984549e-05 7.36891707e-06 1.51534463e-04 1.48332631e-03 4.95935837e-03 1.91499032e-02 3.01804044e-04 6.28613270e-05 4.78365598e-03 8.38827982e-05 1.70516931e-02 1.52653758e-03 5.85798814e-04 3.11521399e-05 2.11968741e-04 7.41351105e-05 1.40834545e-05 8.93215940e-04 1.45371505e-05 4.96711982e-05 4.11317131e-04 8.89070239e-03 5.06997202e-03 3.08362325e-03 2.77415646e-04 3.75299685e-04 1.19906381e-05 1.50029315e-03 1.14443043e-04 2.52026439e-05 9.22407198e-04 3.51146841e-03 1.11564566e-06 1.36691102e-04 3.53032886e-03 2.15746608e-04 8.79282816e-05 4.36248304e-03 1.77966576e-04 1.47887832e-03 6.94399816e-04 8.03673174e-04 5.23004041e-04 3.90421192e-04 1.06344873e-03 3.55399796e-04 6.01265463e-04 1.55850008e-04 1.33491016e-03 1.09734829e-04 4.38019342e-04 2.42487862e-04 6.84730615e-03 1.02040754e-03 1.07652310e-03 3.51822848e-04 9.20735547e-05 7.50967592e-04 1.44127226e-02 3.58455327e-05 5.16555374e-05 1.31370616e-03 9.02966480e-04 1.24254671e-03 5.20300702e-04 8.57163919e-04 3.66344648e-05 2.01024144e-04 6.52487564e-04 5.93215809e-04 5.76604251e-03 6.19325438e-04 1.16480421e-03 2.37531040e-05 2.50119111e-03 7.08868974e-05 5.99786472e-05 2.55976247e-05 4.62695534e-05 4.24469297e-04 6.20667648e-04 4.15926515e-05 7.03983005e-06 8.77018738e-06 5.21141301e-05 2.11411956e-04 7.74205779e-04 5.31276630e-04 6.44316664e-04 4.07212786e-03 2.68336060e-03 1.74210854e-05 3.76385942e-05 6.74255705e-03 4.46323538e-05 2.76757801e-05 2.56290223e-04 1.22213329e-04 1.22734054e-03 7.73016480e-04 1.11903930e-02 3.16570923e-02 2.75775470e-04 5.73344238e-04 2.86890985e-03 1.10085262e-03 1.35615155e-05 2.66479654e-03 1.99418981e-03 4.31017601e-04 9.68350447e-04 3.51598108e-04 8.54862970e-04 3.52715979e-05 1.46333405e-04 5.10955288e-05 1.48639630e-03 1.80458324e-03 7.51840998e-05 1.13529910e-04 3.89828119e-06 8.74532212e-04 1.12358983e-04 3.93593837e-05 6.01037289e-04 2.06997487e-04 3.94766452e-03 1.09549124e-04 2.11403880e-04 6.95336203e-04 5.99777419e-03 5.45272342e-05 2.56420486e-03 2.20299728e-04 4.23851707e-05 6.69996080e-04 2.66609713e-04 1.55276459e-04 2.75739990e-02 3.43240798e-03 2.68303775e-05 1.52821158e-04 9.82575657e-05 4.00313947e-05 6.07266993e-05 5.28094570e-05 1.02948405e-04 6.20577412e-05 2.12161940e-05 2.99842539e-03 1.17558768e-04 1.58015324e-03 3.30074807e-04 1.19093776e-04 2.52985101e-05 1.59350988e-02 4.89539379e-05 1.05491054e-05 1.09012712e-04 2.97089737e-05 7.28885690e-03 1.87386977e-05 1.85028894e-05 5.79945299e-05 1.54079917e-05 
9.85169099e-05 1.05076749e-03 7.55816349e-04 2.62255053e-05 1.18091421e-05 2.95209320e-05]] Top class: omelette, Probability: 0.03991156816482544 Class: omelette, Probability: 0.03991156816482544 Class: steak, Probability: 0.03165709227323532 Class: tacos, Probability: 0.027573999017477036 Class: breakfast_burrito, Probability: 0.021740607917308807 Class: pulled_pork_sandwich, Probability: 0.01914990320801735 (own): omelette - 3.66s 
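One thing worth checking, given the scripts above: training writes the class map to model_training/class_indices.json, while the classify() code reads models/own/class_indices.json. If the copy used at inference is stale or ordered differently, every predicted index gets mapped to the wrong class name even if the model itself is fine. A minimal sanity-check sketch, assuming only those two paths from the post:

    import json

    # Paths as they appear in the training script and the classify() code above.
    with open("model_training/class_indices.json") as f:
        train_map = json.load(f)
    with open("models/own/class_indices.json") as f:
        infer_map = json.load(f)

    print(len(train_map), len(infer_map))  # both should be 475
    print(train_map == infer_map)          # False would mean indices are mapped to the wrong names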
Help would be appreciated because I'm slowly losing my mind :(
Jonas
submitted by Jonasbru3m to tensorflow [link] [comments]


2024.06.01 14:02 RLEsportsMods Best of r/RocketLeagueEsports - May 2024

Hello everyone. Action on and off the pitch battled for headlines as the Major 2 qualifiers rolled on. This is the Best of RocketLeagueEsports for May 2024.

What is this?

These monthly posts will serve two purposes throughout the year:
  1. The hope is that come the Year End Subreddit Awards, the material curated here, both by the moderators and the community itself, can be useful during the nominations process, as it's easier for users to recall remarkable posts/comments from January in the first week of February than in the last week of December.
  2. And if the subreddit awards don't interest you, at least these posts can serve as a nice time capsule for this community going forward!

How is stuff curated here?

While we're open to changing what us mods curate here, initially we will curate the following:

Best of RocketLeagueEsports - May 2024

The Top Submissions:
  1. All POVs of NiP's absurd kickoff strategy vs Complexity
  2. Following the clip of NWPO being racist in a Discord call, R1 release their roster.
  3. Arsenal on nwpo's apology.
  4. Retals' Day 1 Open Qualifier Heatmap
  5. G2 freestyling in a RLCS match like its a plat lobby
The Best General/Discussions Threads:
  1. Werty pleads to Psyonix on the current state of SSA RLCS
  2. Squishy's view on RLCS format after *spoilers* in EU Open Qualifier 5
  3. This weekend‘s discussions in a nutshell
  4. With online play (mostly) over and 107 days until the world championship begins, Feer questions if teams can maintain their form shown thus far in the season
  5. LGs Broadcast Producer tweets out a thread talking about the current RLCS format and bringing back league play
Top Goals/Plays of the Month
  1. All POVs of NiP's absurd kickoff strategy vs Complexity
  2. G2 freestyling in a RLCS match like its a plat lobby
  3. 2 of the goals Dark scored this weekend in the RLCS MENA
  4. Beastmode keeps getting away with it
  5. Kofyr uses two flip resets to turn an unscorable angle into an insane goal for Lil Step Bros
Top Comments of the Month
  1. u/GrowImonDrgnButt points out the flaws in NWPO's apology
  2. u/spooki_boogey expresses sympathy for Rule One
  3. u/vivst0r is offended because they are in fact perfect
  4. u/woliwoliwoli speculates on a potential 2 time ban
  5. u/WhatIsSentience appreciates the rise of kickoff strategies
  6. u/coolcole93 makes it into a meme and goes on a date
  7. u/caronfirenodriver on VKSailen trash talking Atow
  8. u/s_mkt comments on Marc_by_8's chair
  9. u/_should_not_post lists the most disrespectful moments in RLEsports history
  10. u/SilverSage16 summarizes Karmine Corp's major odds
Best Live Event Thread Comments of the Month
  1. u/Zinedine_Tzigane with an appropriate assessment of the early stages of the EU Qualifier
  2. u/TheRetroCrowe takes aim at the "Joyo is overrated" narrative
  3. u/Waterpalolegend predicts Johnnyboi's power rankings after KC lose
  4. u/S_h_u_n with an appropriate reaction to Suhhh's record vs KCorp
  5. u/Candyyyyyyy shares a legitimate graph of Retals' accolades

How You Can Contribute!

The posts and comments linked above are not meant to act as the definitive best of the best this month, just a guide based on upvotes. The actual best of RocketLeagueEsports is where you guys come in.
Be sure to comment below with your standout and memorable posts and comments from May. Discussion prompts, shitposts, image comments, standout predictions, memes, or even appreciation for certain users: anything that would fit into the year-end subreddit awards, we want to see it below, or even just a discussion about the past month on this subreddit.
-RocketLeagueEsports Mods
submitted by RLEsportsMods to RocketLeagueEsports [link] [comments]


2024.06.01 14:00 relax7777 Notification sub-categories no longer a thing?

I have a Samsung Z Flip 5.
When I go to the Notification Settings for my Apps (in Android settings), once I've selected a particular App - I only have the option to either turn all Notifications off, or turn them all on.
I'm sure I used to be able to turn on/off specific sub-types of notification on a per-app basis. E.g. for a shopping app I could turn on 'Order update' notifications, but disable general marketing notifications. Now it seems like I can only choose between all or nothing. Has this changed recently?
submitted by relax7777 to AndroidQuestions [link] [comments]


2024.06.01 13:59 Typical-Geologist272 AITA for stopping my ex from seeing his child

Just a bit of context: he was never involved in my pregnancy since we broke up when I was 10 weeks pregnant. I let him know when she was born and he messaged me asking when he could see her.
I arranged for him to see her when she was 5 days old. He didn't seem that interested in her at all; he never looked at her and only held her once during the few hours he saw her. He came again when she was about 2 weeks old and that's when the problems started.
He constantly picked fights, saying I wasn't allowing him to see her, that I was a horrible mother, and that she's not his as she looks nothing like him, etc.
When she was 3 weeks old he tried to end it twice; it got so bad the police had to drag him back to the hospital. I said that since his mental health isn't the best, it's not the best idea for him to be seeing her, and we could arrange something for when his mental health was better. I still messaged him, sent photos, and we still FaceTimed when he could.
2 weeks later I got a letter through for mediation. I messaged him and he said that his mental health was better and that even so I wouldn't allow him to see her (which I never said; the planning just wasn't the best, as when he was busy I was free and when I was busy he was free, so him seeing her was difficult to arrange). After I declined he stopped messaging me.
8 months later I put in for child support. He got his sister to message me to tell me I'm a horrible person for putting in for CS when I don't allow him to see her (I never said he couldn't see her), so I messaged him to see if he wanted to see her, saying I would arrange something so that he wasn't on his own with her, like at a visitation centre.
We went back and forth for a few days, then I asked him to not message me every day (as all it was was him asking if she was OK and constantly comparing her to his family, but when I said oh, she gets that from my side of the family, he would accuse me of pushing him and his family out and of trying to turn her against him; she was 9 months old at the time) and he flipped out on me.
He said, how can I help and support my child if you don't allow me to message? I put back that I'm not saying don't message, just not every day, and, I might be the arsehole for this, but I said that asking how she is and comparing her to your family isn't helping or supporting her.
Then he went on a rant about how much of a horrible person I am and demanded to see her or he would take me to court, to which I just put fine, see you in court then. During the conversation he kept trying to argue with me but I tried to remain civil.
A day later I got a call from CS as he had denied being her dad, so we had to wait for a DNA test and I had to answer some questions. His family however kept messaging me about the case, like a message or two a day, which I ignored. I ended up closing the CS case as his family kept on harassing me about the DNA test.
So aita for stopping him from seeing his child?
submitted by Typical-Geologist272 to AITAH [link] [comments]


2024.06.01 13:50 Choice-Ad6376 AC started then turned off, including thermostat

Turned on the AC for the 2nd time this year. It ran for about a minute and then turned off. Noticed the thermostat had turned off too (it is an ecobee smart thermostat). Checked the breaker box but the breaker didn't trip. Tried flipping the breaker off then back on. Did not fix the issue. The breaker box and AC are on the same side yard and I kinda smelled something burnt-ish over there. Also, the AC has a quick-disconnect-like box attached to it. I noticed the two 110 outlets that are on the quick disconnect still work even when the breaker for the AC is turned off, and a green light on the outlets says the monthly test is still on. Looking online, it says maybe a capacitor went bad? Is that the most likely case? How can I verify? Newish house built 2018 and a Rheem RA13 series AC unit.
TLDR: AC won't turn on and I'm trying to figure out if I need to talk to an HVAC guy or an electrician?
submitted by Choice-Ad6376 to hvacadvice [link] [comments]


2024.06.01 13:25 HerrBogomil Freeboard is underappreciated

TLDR On a lark I bought a used Freeboard and I can only recommend it.
I've been longboarding for 3 years, mostly freeride and surfskate. I like going downhill and working on stand-up sliding. My goal is to learn to slide both goofy and regular; however, the switch stance still gives me grief. On a lark, I bought a used G3 freeboard for a song. While the first session was terribly fucking awkward, by the 3rd session I could do 180s in both stances, a feat that took me literally a year on a longboard. I think it is mostly a matter of speed. Slides on a longboard require a certain amount of it. This in turn requires either balls or skills. And both require training. On the freeboard you don't need a lot of speed, so the risk of injury is lower. Another benefit of the lower speed is that I can go back to less steep roads, which have gotten boring on a longboard and are also easier to find and safer.
submitted by HerrBogomil to longboarding [link] [comments]


2024.06.01 13:24 guest_from_Europe a look at first round of 2004-2018 NFL drafts, players sorted by position and quality

Yesterday I put up a link to the 25-year study of first-round drafts. There were some complaints about it. Now I've looked at more recent drafts and didn't just search for All-Pros or Pro Bowlers... but the conclusions are very similar.
I went through the first round of the 2004-2018 drafts and sorted players by position and career quality. 2004 was taken as the cutoff because of modern passing rules; more recent drafts weren't looked at because it's still unclear how young players will do in the future. It didn't matter if a player played for multiple teams, only his whole career. I looked mostly at weighted career AV (approximate value).
Player quality I sorted into: complete bust, not worth a first-round pick, above-average starter, star, and HOF-level.
Career AV numbers are skewed toward QBs and WRs and less toward non-stats positions, as is draft position. It isn't perfect, but I don't know of anything else that compares LBs, DTs, OTs, and WRs on the same scale; defense has no yards, TDs, etc. If a player is still active I projected that they will add something to their career AV. AV overvalues QBs such as Winston and A. Smith, so I put them one group lower: Winston is a backup, Alex was average.
In 2004-2018, drafted in the first round were 44 QBs, 37 RBs, 54 OTs, 28 interior O-linemen, 16 TEs, 57 WRs, 49 DTs, 75 EDGE rushers (including 3-4 DEs), 31 inside LBs, 57 CBs, and 32 S: 480 players in total. Complete busts were 22.3%, not worth a first-round pick 26.2%, 7.7% were HOF-level players and top QBs, and 18.3% were stars. So only 26% in total turned into what fans get excited about. 25.4% were in the middle group, above-average starters, which is what should be expected.
About 50% of first-round picks are wasted on busts or reaches. Yet teams value these picks more than proven players; rarely is an established player traded for a first-round pick.
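As a quick check, the counts and shares quoted above are internally consistent; a small sketch using only the numbers from this post:

    # Position counts and outcome shares as quoted in this post.
    picks = {"QB": 44, "RB": 37, "OT": 54, "iOL": 28, "TE": 16, "WR": 57,
             "DT": 49, "EDGE": 75, "iLB": 31, "CB": 57, "S": 32}
    print(sum(picks.values()))  # 480, the stated total

    shares = {"bust": 22.3, "not worth a 1st": 26.2, "above average": 25.4,
              "star": 18.3, "HOF-level / top QB": 7.7}
    print(round(sum(shares.values()), 1))                  # 99.9, so the groups cover everyone
    print(shares["bust"] + shares["not worth a 1st"])      # 48.5, the "about 50%" wasted
    print(shares["star"] + shares["HOF-level / top QB"])   # 26.0, the "only 26%" hit rate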
By position
QB 29.5% bust, 27.3% backup, 21.5% average QB starter, 22.7% franchise QB
RB 16.2% bust, 35% not worth it, 19% above average, 27% stars, 2.7% HOF
OT 11.1% bust, 22.2% not worth it, 35.3% above average, 26% stars, 5.5% HOF
iOL 14.3% bust, 14.3% not worth it, 32.2% above average, 28.5% stars, 10.7% HOF
TE 6.2% bust, 62.5% not worth it, 31% above average starter
WR 26.3% bust, 26.3% not worth it, 24.6% above average, 17.5% stars, 5.3% HOF
DT 24.5% bust, 30.5% not worth it, 22.5% above average, 16.3% stars, 6% HOF
EDGE 30.7% bust, 17.35% not worth it, 24% above average, 17.3% stars, 10.7% HOF
iLB 12.9% bust, 22.6% not worth it, 19% above average, 29% stars, 6.4% HOF
CB 29.8% bust, 22.8% not worth it, 24.6% above average, 17.5% stars, 5.3% HOF
S 18.7% bust, 27.5% not worth it, 22% above average, 18.7% stars, 3.1% HOF
Conclusions:
Most total busts were 23 edge rushers, 17 CBs, 15 WRs, 13 QBs.
submitted by guest_from_Europe to nfl [link] [comments]


2024.06.01 12:53 Berserk2024 Official title: Matthew Hammond age 16. Class1A cause I can. (Updated)

Official title: Matthew Hammond age 16. Class1A cause I can. (Updated)
Name Matthew Hammond age 15
Quirk: Power control
Has the ability to control a type of energy only he has.
Ways to use this energy include standard strength and speed, by putting the power into his muscle fibers and organs to enhance their performance. Matthew can also materialize this power in different ways. He finds out how he can do this by watching others, for example shock waves, energy tentacles, force fields, laser eyes, power balls, or claws.
With the performance enhancement of his eyes, ears, and touch, he will become the weave nation president.
He is highly resistant to energy based attacks.
The main weakness is energy drain. I know what y'all are probably thinking, "this is like every quirk's weakness cause using your quirk can tire you out", but his quirk's energy literally drains.
When he uses his quirk, the energy runs down, and if he uses an all-out attack he loses the energy and can't use his quirk.
But on the good side, this energy grows back over time, and since it's his quirk energy, if he goes back to where he let out a huge attack, he can absorb it back. It takes like an hour to get his power back to 100% naturally if he just goes on with his day.
Mutation: * Wolf Ears and Tail: Matthew has permanent wolf-like ears and a tail, got them from his dad.
Also he can share his energy with others to enhance their quirks cause energy works like that I think.
Elemental Forms and Powers:
  1. Lightning Form (Name: Volt)
  • Powers: Electrokinesis, Enhanced Speed
Weakness: If used too much then it will damage his nervous system temporarily.
  1. Fire Form (Name: Blaze):
  • Powers: Pyrokinesis, Heat Resistance
Weakness: if he uses too much then he'll gain first-degree burns.
  1. Water Form (Name: Aquos):
  • Powers: Hydrokinesis, Water Healing
Weakness: No clear weakness.
  1. Ice Form (Name: Frost):
  • Powers: Cryokinesis, Enhanced Durability
Weakness: if in a heated area, then he will get weak and slower.
  1. Light Form (Name: Radiance):
Weakness: overuse will make him temporarily blind
  • Powers: Photokinesis, Illusion Projection
I put darkness here because I have light so I might as well put dark, f**k physics.
  1. Darkness Form (Name: Shadow):
  • Powers: Umbrakinesis, Intangibility
Weakness: Obviously light. Even a flashlight.
  1. Plant Form (Name: Flora):
  • Powers: Chlorokinesis, Plant Communication
  1. Metal Form (Name: Alloy):
Weakness: Over time he'll get weaker cause of rust. Water.
  • Powers: Metalkinesis, Enhanced Strength
  1. Earth Form (Name: Terra):
  • Powers: Geokinesis, Earth Manipulation
No clear weakness.
How this works is his power works like a computer, well, more like just a normal computer, and then a virus (AKA an element) infects this power and slowly corrupts it, but only until his body is able to use a different element. The form is only as strong as his original form because the element can only corrupt what is there, not make more power. A little bit of his own energy is still there so that it can take back control. Each form has a mind of its own, like some dark shadow type thing. The reason why his forms have minds of their own is cause if I gave him full control of his forms he would swap too frequently and that would be too strong.
The reason why he can do this is that when he was 3 his quirk was developing and he was exposed to radiation, messing with his quirk genes, and when his quirk developed it added this.
Hero Costume:
  • Appearance: Matthew's hero costume features a sleek and modern design. It includes a green suit with black accents, symbolizing his main form's green hair and eyes.
  • Symbol: The costume incorporates a stylized emblem combining the symbols of all his elemental forms, representing his fusion abilities.
  • Support Items: Matthew carries specialized bracers that enhance his control over his energy whips. Additionally, he has utility pouches to store various tools and gadgets.
He also has tubes about the size of water bottles on his belt that he charges with his energy every night before he goes to bed, in case he runs out of energy himself.
Has a sword made of quirkinite (past post).
Maximum Output Move: Master Elemental
  • Description: Matthew channels and combines the energies of all his elemental forms into one ultimate attack.
  • Effects: Elemental tattoos appear on his body, symbolizing each element's power. Lightning appears on his head, fire on his chest, water on his left arm, ice on his right arm, light on his left leg, darkness on his right leg, plant on his right shoulder, metal on his left shoulder, and earth on his back. The attack unleashes a devastating and overwhelming burst of elemental energy.
Carries mounted cannons inspired by Titan Speakerman. They harness his power into either energy balls or just flat-out lasers.
To make myself clear: in his max form, when he uses all his elements, he can only use 1/10 of each form's power, including himself as the main guy of this whole body.
Moves
Signature move: Packs fist. When Matthew charges his entire arm with power he hits the target with a powerful punch, and with the recoil he hits them again, so it's like a double punch with one arm.
Normal moves:
Surge. Full cowling but different.
1 Minieye(Minigun eye): Uses his laser eyes as a mini gun to help save power and hit targets with better accuracy.
Ground capture. He uses his tentacle-like energy whips by forcing them into the ground; they then make their way to the opponent, grab whatever is touching the ground, and pull them down to immobilize them.
Field fist. By using his force fields that his energy makes, he shapes them into bigger arms for durability and extra strength.
Power kick. By doing a constant front flip (like spinjitzu but in disc form) he sticks his leg out and it hits the target. If the target has high durability the attack will keep on going like a saw.
Support moves
Force field armor. Covers his body in compressed force field power and wears it like armor. Skin-tight but durable. And it's see-through.
Clones wolf. He expels energy and compresses it to the point it can shape itself into a copy of him and then somehow take color. The two main differences are that the clones have limited power usage, so if one uses too much power it disappears, and in physical design the clones' eyes are constantly green, like glowing green.
Instinct. By enhancing his hearing, touch, and brain processing, he can use our off-brand Ultra Instinct. Using his hearing and touch he can feel every movement in the air and everything around him, and he can hear each movement in the air with his ears; his brain calculates how long it took for the sound to reach his ear, so he knows exactly where his attacker is.
Sword moves.
Excalibur. Three swings, x slash, then stab
Powerful blade. One big vertical slash.
Ultimate Moves.
Max power. Puts all of his power into one part of his body, arm, leg, or head, then uses that power in an all-out melee attack.
Alpha Surge. This is a Super Saiyan-like form. His energy propels him as he flies freely. Insane amounts of strength, speed, durability, and power.
Single elemental moves. Each form only has three.
Fire :
Blue Star. Increases the heat of the fire to the point its color turns blue and it's hot to the touch; all damage increased.
Fire Fist. His fist is as if it were made from flames; the punch is fast and strong, and on impact there's a shockwave of fire.
Jet Drive. Four places on his back shoot flames constantly for speed, and then the user does a great fire kick.
Water :
Aqua alpha. Water in a wolf-head form bites down on the opponent.
Drowner. Basically a small tsunami the size of a house. Mainly used for chaos and confusion.
Water jet cutter. You know how water put through a specific kind of nozzle at high pressure can be the sharpest thing in the world, and be very dangerous? Well, he can do that with the tip of his finger.
Lightning:
Pinpoint thunder. All power goes into his fingertips then he strikes at one spot.
EMP. Self-explanatory. Also works on people.
Power ground. Covers the floor in electricity.
Earth :
Earthquake. Also self-explanatory.
Crystal quake. Creates rocky spikes around the area and also adds crystals to them. Depending on the crystal you get different effects; quartz will make all electric attacks go to the spikes because quartz is conductive.
Earth wall. Makes a durable wall.
Ice:
Constant shards. Smg but ice bullets.
Domain expansion, Arctic area. Covers just about the entire area with ice, giving the user just about full control over where he is fighting.
Ice capture. Traps whoever he points it at in a giant ice crystal.
Plant:
Domain expansion, Jungles Forest. Creates a lot of trees and plants to take control of the area.
Golem. Creates a big beast made of wood and plants.
Spear. Covers his arms with wood and vines in a sharp spear.
Metal:
Weaponry. Grabs a lot of metal and constantly makes different weapons depending on the situation, maybe even a shield.
Iron spike. Creates constant metal spikes in front of the user.
Armor. Makes armor around allies.
Light:
Beam. A big ah laser.
Bounce back. Constantly reflects off objects confusing the enemy.
Bang. Just a stun grenade, but it's coming from his hands.
Darkness.
Warper. One name, kirogiri.
Consume. Absorb things into the darkness.
Control. Mind control.
Omega move. Max element.
More of a fusion of his forms; here's the list of this more powerful form's moves.
Cryogenic Barrage. Matthew gathers concentrated orbs of water and ice energy. He then launches a rapid-fire volley of freezing projectiles that detonate on impact.
Pyroclastic Flow
Matthew ignites streams of fire energy, blending them with molten earth and metal. He sweeps his arms to direct a raging torrent of scorching lava and magma.
Tempest Devastation
Matthew summons torrents of water that he infuses with electrical currents. He then unleashes a violent maelstrom of electrified, crushing waves.
Radiant Implosion
Matthew focuses beams of light energy, bending them with dark shadow tendrils. He compresses the luminous orb, creating an imploding singularity of blinding power.
Verdant Tangle. Matthew interweaves thorned vines with solidified metal shards. His grasping, spiked tendrils ensnare and impale anything they catch.
Seismic Upheaval. Matthew ruptures the ground, combining earth tremors with shard-like metal spikes. Jagged, uneven terrain and piercing spikes erupt from below to impale targets.
Final move.
Packs punch.
All elements in one giant punch attack.
This form is only as powerful as Deku's 100%. He can't just access this all the time; he needs to have a lot of power and total concentration.
PERSONALITY
He's smart, nice in a way, and sometimes a prankster. If you try to roast him he will go complete PackGod on you. If he's in a bad mood don't try to comfort him, you'll just make things worse, and not for him, for you. He's really cocky, but not Bakugo cocky, he's just a bit overconfident. He is very creative. He loves tech to the point that if you break a 30 buck phone around him, let's just say you're gonna want to do a crime in front of Batman before Matthew finds you.
Background:
Favorite food: Red Beans and rice
6ft 2
His mom, him, his brothers, and his sister somehow got the same disease that Michael Jackson had, so they all went from black to white. But since they're still African they do still have passes.
Hates obsessive people.
(I'm bout to do some bull shit y'all probably won't like.)
His older brother has a phoenix quirk and he's a hero.
His younger brother has the fusion quirk that I made in a past post.
His sister has the digital master quirk that I also made in a previous post.
Anyway, I hope y'all like my reworked OC
submitted by Berserk2024 to BNHA_OC_Characters [link] [comments]


2024.06.01 12:46 ExamOrganic1374 AIO to this? Suspected abuse in family

Here's my situation:
Recently I visited with an aunt who was neglectful of her two grandsons of 2 and 5 years old. In addition to this, she was also very hostile and impatient towards my 85 year old grandmother. These behaviors led me to shift into "surveillance mode".
Regarding the neglect of the boys, she would:
1.) Allow them to trip and fall without ever tending to them, no matter how bad the fall.
2.) Not give them water or fluids/food on a consistent basis.
3.) Leave them confined in "crib cages" that were bungee corded shut with blankets thrown over them for significant proportions of each day, despite begging to be let out. She would do the same with high chairs, leaving them there often in excess of 60 minutes.
4.) Ignore them whilst in said cages despite them clearly being ill with a respiratory infection that featured a nasty croup cough, leaving them with no water.
5.) Fail to tend to them for said respiratory illness at any point in any way throughout the day, including a failure to have them seen by a physician.
6.) Would respond with harsh unkindness to their pleas for attention.
She has near total control of the children, despite my cousin and the father of the children also living there. I got the sense that this is something about which they are unhappy, but powerless to change. They would consistently want their children with them, yet she would quietly go about getting them back out to the living room where she would just ignore them.
I got the feeling that she is coercing my cousin and her boyfriend into submission in some way for some reason. When I was defending my grandmother and my cousin came and rallied with me in agreement that she should be treated better, my aunt's response to her, in a moment she thought I wasn't paying attention, was a nasty scowl and neck-chop gesture that I believe meant "knock it off, or else", which clearly intimidated my cousin and silenced her immediately, followed by a quick retreat to her room. Her boyfriend also tended to display signs of similarly fearful or anxious submission.
Fast forward to my last night there.....:
1.)The children are locked in the cages, and she's finishing up showering or whatever in the bathroom.
2.)She comes out in a robe and sits across from me on her bed, and begins to part her robe and expose her undergarments whilst groping her inner thighs and clearly enjoying it, during which she attempted to make eye contact.
3.)After about 30 seconds of me paying "zero attention" to what she was doing and looking about the room, she huffed, closed her robe and stood up, and began displaying obvious indifference towards me.
4.)Approximately 5 minutes after standing up and acting clearly "moody", she approached the cage in which the 5 year old was residing. She crouched down to his level and out of sight and began repeating "Come over here my little boys name, grandmas not gonna love you until you come right here". He seemed to hesitate to do so, because she said it numerous times before stopping.
5.)Almost immediately after she stopped repeating those words, he began to giggle and laugh intensely for a period of roughly 30 seconds, followed by a single "suction pop" like sound, after which the giggling laughter ceases.
6.)She stands up and pivots away from the cage with a huge grin on her face whilst still looking towards the cage, and proceeds to lick her lips and wipe her mouth.
I tried to tell myself it couldn't have been what I thought it was and tried to suppress the extreme uneasiness welling up inside me, convincing myself she was just tickling him innocently like a grandma... Until she was sitting down moments later on the couch and he started coughing badly, gasping for air, managing to utter once before falling silent "Grandma, please let me out!", to which her response was cold hearted "Nope, uh uh!"
Long story short, I damn near lost it and left the house for a 'walk', which was actually me calling the police and trying to report her for neglect. The police did nothing. I went to my oldest cousin, who I thought was a mature adult, and tried to convince her to plant nanny cams in her home near those cribs because I believe 100% she's sexually abusing at least the older boy, to which her response was to flip shit and tell the whole family, who now despise me for having the 'gall' to 'fabricate' such a story, which has made me in turn despise them for calling me a liar.
I've been plotting ways to try and see what the fuck is really going on... It's been on my mind every fucking day since...
Am I overreacting...?
submitted by ExamOrganic1374 to AmIOverreacting [link] [comments]


2024.06.01 12:45 username_avi swissy evil arc?

swissy evil arc?
TRANSCRIPT:
slide 1: swissy: this mistake of a banana
slide 2: swissy: i was presumed dead, he took my wife and house
slide 3: swissy: today is the day he will be presumed dead
slide 4: swissy: today is the best day of my life.
slide 5: random swissy friend: hi swissy! swissy: jolts in surprise, turning the gun towards canny and shoots
slide 6: swissy: darn it just killed my wife
submitted by username_avi to JOKE_CAMP [link] [comments]


2024.06.01 12:42 RaweMemes THE PHOTO CLUB

Jessie loved photography. When her high school formed a new photography club, she eagerly joined. One day, Mr. Thompson, the club advisor, announced a special project: documenting the town’s history. Jessie was assigned the old, abandoned mansion at the edge of town.
Excited, she grabbed her camera and headed out after school. The mansion, with its crumbling facade and air of mystery, had always fascinated her. As she approached, she felt watched. Shaking off the eerie feeling, she pushed open the creaky gate and stepped into the overgrown garden.
Inside, the mansion was a photographer's dream. Dust motes floated in beams of sunlight filtering through broken windows. Jessie snapped picture after picture, moving from room to room. In an upstairs bedroom, she found an old, dusty mirror. It was cracked but still reflected the room behind her. As she raised her camera, she froze. In the reflection, she saw a figure standing behind her. She spun around, but the room was empty.
Heart pounding, Jessie convinced herself it was her imagination. She continued taking pictures but felt uneasy. As she finished, she decided to take one last photo of the grand staircase. Focusing her camera, she saw it again—the figure, now at the top of the stairs, staring down at her. This time, it was a young girl in an old-fashioned dress, eyes hollow and dark. Jessie snapped the photo, and the flash seemed to make the girl vanish.
She bolted from the mansion, not stopping until she was safely home. That night, she developed the photos in her darkroom. The images were stunning, capturing the mansion's haunting beauty. But in every photo, the same girl stood in the background, watching her.
Terrified, Jessie brought the photos to Mr. Thompson the next day. He examined them, his brow furrowing. "These are incredible, Jessie. But the girl... she looks familiar."
He pulled out an old yearbook. "This is Emily Sinclair. She lived in that mansion and disappeared over fifty years ago. They never found her."
Jessie's blood ran cold. "So, what does that mean? Is she... a ghost?"
Mr. Thompson shook his head. "I don't know. But there's something you should see." He led her to a locked cabinet and pulled out old, yellowed photos. "These were taken by students in the photography club over the years. Look closely."
Jessie flipped through the photos, her heart sinking. In each one, Emily Sinclair was in the background, always watching. "Why didn't anyone say anything?"
"They did," Mr. Thompson replied grimly. "But no one believed them. Everyone thought it was a trick of the light."
As Jessie stared at the photos, she felt a chill run down her spine. She looked up at Mr. Thompson, but he was gone. The room was empty, except for her and the photos.
Suddenly, the door slammed shut, and the lights flickered out. Jessie felt a cold hand on her shoulder and turned to see Emily Sinclair, her hollow eyes filled with sadness and rage.
"You've seen me," Emily whispered. "Now you belong to the house, just like all the others."
Jessie screamed, but no sound came out. The darkness closed in around her, and she felt herself being pulled away, her camera slipping from her grasp. The last thing she saw was Emily's ghostly face, inches from her own, before everything went black.
When the lights came back on, the room was empty. The photos lay scattered on the floor, and Jessie's camera was gone. She was never seen again, just another victim of the mansion's dark history, forever captured in its haunting photographs.
submitted by RaweMemes to shortscarystories [link] [comments]


2024.06.01 12:41 just-engneer434 make a simulation for cube fall down on tube

I want to make a simulation of a cube falling down onto a tube, but there is an error message about shell contact. How do I solve that?
https://preview.redd.it/ca1lnlnmvx3d1.png?width=387&format=png&auto=webp&s=b7c337be735478c831d4066093a6dfab5cf8bfd4
https://preview.redd.it/fuq2xknmvx3d1.png?width=495&format=png&auto=webp&s=c2ffb419c94b5fac2f39b8ace556c9019e3e0a47
https://preview.redd.it/5z8o0o7nvx3d1.png?width=1461&format=png&auto=webp&s=e887767c630f38cca6c2caf7e0a75b36e9985fc5
submitted by just-engneer434 to ANSYS [link] [comments]


2024.06.01 12:31 ConsequenceSure3063 Best 1911 Red Dot

Best 1911 Red Dot

https://preview.redd.it/dji68u8vtx3d1.jpg?width=720&format=pjpg&auto=webp&s=28df6640646d1159f54d6bcb0ac9ad47ab9bae60
Welcome to our exciting roundup of the latest and greatest in the world of firearms - specifically, the 1911 Red Dot. If you're a fan of the classic 1911 pistol design, you're in for a treat as we explore the advancements and innovative features that make this iconic firearm even better. Buckle up and get ready to dive into the world of the 1911 Red Dot, as we bring you a blend of tradition and modernity.

The Top 5 Best 1911 Red Dot

  1. Adjustable Retention IWB Holster for 1911 Red Dot Optic Cut - Discover the perfect fit with our 1911 Red Dot IWB Holster, featuring adjustable retention, ride, and cant, maximizing comfort and flexibility in your carry options.
  2. Adjustable Retention OWB Holster for 1911 5" Government Red Dot Optic Cut Constitutional Carry Left - Experience ultimate comfort and flexibility with this 1911 5" Government No Rail Only Red Dot Optic Cut OWB Holster, offering adjustable retention, ride and cant, clip positions, and a protective sweat guard for a perfect fit and secure draw.
  3. Comfortable OWB Holster for 1911 5" Government 45ACP with Rail Only Red Dot Optic Cut - A sleek and adjustable OWB Kydex holster for your 1911 5" Government 45ACP with Rail Only Red Dot Optic Cut, designed for maximum comfort and customizable carry options.
  4. Customizable Kydex OWB Holster for 1911 45ACP Red Dot Optic Cut - This adjustable retention, ride and cant OWB holster for 1911 3.25" Defender 45ACP No Rail Only Red Dot Optic Cut ensures a comfortable, secure carry with optional clip positions and a protective sweat guard.
  5. Discrete 1911 4" Commander Red Dot Covert Holster - Custom-molded OWB holster for 1911 4" Commander, designed with adjustable retention, ride, cant, and clip for secure, comfortable carry.
As an Amazon™ Associate, we earn from qualifying purchases.

Reviews

🔗Adjustable Retention IWB Holster for 1911 Red Dot Optic Cut


https://preview.redd.it/mdonfkhvtx3d1.jpg?width=720&format=pjpg&auto=webp&s=33eae95f494402493bb0b8e9e0e0d3bb2251e38e
I recently tried the "1911 5" Government 45ACP with Rail Only Red Dot Optic Cut IWB Holster" in my daily life. This holster, made from Kydex material, is specifically designed for the 1911 5" Government 45ACP with Rail Only Red Dot Optic Cut. The first positive aspect I noticed was that it's not too flashy, so you can keep your firearm close to your body without drawing too much attention. The material is durable and doesn't flex easily, meaning you can trust it to protect your firearm and keep it secure when it's needed most.
However, one downside I experienced was that it took some time to adjust the holster to my personal comfort level. Although the retention is adjustable, it took a fair amount of time to find the right setting. Additionally, the holster is designed for a specific gun configuration, so if your pistol doesn't match that description exactly, it might not fit as securely as you'd like.
Overall, I found the "1911 5" Government 45ACP with Rail Only Red Dot Optic Cut IWB Holster" to be a decent choice, but it's important to note that it might not be perfect for everyone. If you're looking for a versatile and reliable holster for your specific 1911 5" Government 45ACP with Rail Only Red Dot Optic Cut, then this might be the right option for you. But if you're looking for something with more adjustability and a more universal fit, you might want to explore other options.

🔗Adjustable Retention OWB Holster for 1911 5" Government Red Dot Optic Cut Constitutional Carry Left


https://preview.redd.it/db9hazvvtx3d1.jpg?width=720&format=pjpg&auto=webp&s=5f4373f91ae0485bcfa6ea322f04c964ef5566cf
As a seasoned firearm enthusiast, I've tried out many holsters in my time. Recently, I had the opportunity to give the 1911 5" Government No Rail Only Red Dot Optic Cut OWB Holster a spin, and I must say, it was quite an experience.
The holster's design caught my attention right away, with its patriotic red, white, and blue color scheme. It's a bit unconventional, but it certainly stands out. Made of durable Kydex, the holster seemed well-constructed and reliable. The adjustable retention system allowed me to set the tension to my personal preference, which I appreciated.
Unfortunately, the fit wasn't perfect for my 1911. As someone who values a snug and secure holster, I found that this particular model left a lot to be desired. The tension was too loose, which caused the holster to flop around when I tried to draw my firearm. Additionally, the belt clip didn't sit right on my waistband, making it hard to get a clean draw without the whole holster shifting along with the gun.
On the plus side, the sweat guard was a nice touch, preventing any contact between the gun and my body. I also liked that the clip could be adjusted up to 12 different positions, which allowed for more flexibility when carrying my firearm.
In conclusion, the 1911 5" Government No Rail Only Red Dot Optic Cut OWB Holster had some great features, but fell short when it came to providing a secure and comfortable carry experience. While it may work well for some users, I personally wouldn't recommend it for its poor fit and clumsily designed belt clip.

🔗Comfortable OWB Holster for 1911 5" Government 45ACP with Rail Only Red Dot Optic Cut


https://preview.redd.it/rl8eoubwtx3d1.jpg?width=720&format=pjpg&auto=webp&s=be78645515fee9f0294389e6396ae3277a5ed144
When I first came across the 1911 Red Dot OWB Holster, I must admit, I was a little skeptical. Would it be comfortable? Would it really provide the security I needed for my firearm? But after using it for a few weeks now, I can confidently say - this holster is the real deal.
The most striking feature of this holster is its adjustable retention. This means you can set the tension to your personal comfort, ensuring your gun stays securely in place. The adjustable ride and cant provide maximum flexibility in carry options, allowing for a comfortable and discreet fit - something I appreciate as a gun owner who needs to blend in.
Despite its numerous advantages, this holster also has a couple of drawbacks. The adjustable clip has a limited number of positions, which may be a concern for some users. Also, the protective sweat guard can sometimes cause friction against your body, which can be uncomfortable over long periods.
Overall, the 1911 Red Dot OWB Holster has proven to be a reliable and comfortable option for those looking for a secure way to carry their firearm. While it may not be perfect, its adjustable features and protective sweat guard make it a worthwhile addition to any gun owner's arsenal.

🔗Customizable Kydex OWB Holster for 1911 45ACP Red Dot Optic Cut


https://preview.redd.it/g82bf6pwtx3d1.jpg?width=720&format=pjpg&auto=webp&s=f047142a5410f9f30f7d09c9510fcbe542ab5b57
A while back, I got my hands on a Kydex holster that's an absolute dream to wear. I've been using it daily to carry around my trusty 1911 Defender with a red dot optic cut, and boy, it's been a game-changer.
Adjusting the holster's tension is like finding that perfect sweet spot where it feels like it's securely hugging your gun, but not so tight that you have to take a sledgehammer to draw it. The flexibility for adjusting ride and cant angles is just the icing on the cake. Adjusting the clip position is another one of those little details that makes a big impact - it's like having your own personalized holster.
Now, let's talk about the protective sweat guard. It's a detail that's often overlooked, but it's crucial for keeping your gun and body happy in those hot summer months. The secure belt clip is the cherry on top; it ensures that you'll always get a clean draw, and your gun will stay right where it's supposed to be.
But here's the caveat: this holster is specifically designed for the 1911 3.25" Defender with a red dot optic cut. It won't fit just any old Colt or Taurus; there's a certain finesse to this thing. So before you buy, make sure it's the right fit for your pistol. If it is, though, you're going to fall in love with this bad boy.
All in all, I've got to say that this holster has been a game-changer in my life. It's made my concealed carry experience so much more comfortable and secure. Just be sure to check your 1911's specifications before diving in—there's no point in falling for the wrong holster.

🔗Discrete 1911 4" Commander Red Dot Covert Holster


https://preview.redd.it/0c0v900xtx3d1.jpg?width=720&format=pjpg&auto=webp&s=8389849691c5b6b8a7186b1f332ca1a27d456558
As a reviewer, I recently tried this 1911 Red Dot OWB Holster and was thoroughly impressed with its durability and comfort. The holster is made from Kydex, a lightweight and strong material that ensures a snug fit for the 1911 4" Commander 45ACP No Rail Only Red Dot Optic Cut.
One of the standout features of this holster is its adjustable retention, which allows users to set the tension to their personal comfort. This customization ensures that the holster securely holds your firearm without causing any discomfort or strain on your belt.
Additionally, the adjustable ride and cant provide maximum flexibility in carry options, making it suitable for various situations and environments. The adjustable clip, with up to 12 different positions, ensures compatibility with a range of different belts and waistlines.
However, the protective sweat guard has a small learning curve to work around, as it can obstruct the draw slightly at first. Nonetheless, once you get used to it, the sweat guard minimizes contact between your firearm and your body, providing an increased level of safety and comfort during wear.
The secure belt clip is another crucial aspect of this holster, ensuring clean draws each time. It holds your firearm securely, but doesn't compromise the ease of access when needed. Overall, I would highly recommend this OWB Holster for any 1911 Red Dot users looking for a dependable and comfortable carrying solution.

Buyer's Guide

Welcome to the Buyer's Guide for 1911 Red Dot sights. This guide will cover important features, considerations, and general advice to help you make an informed decision when purchasing a 1911 Red Dot sight. We will not list specific product picks or provide external resource links in this guide.

Features


https://preview.redd.it/rmpdx9kxtx3d1.jpg?width=720&format=pjpg&auto=webp&s=8993272466add51a68e7268ca64d298871a1116f

Red Dot Optic

Red dot optics provide a simple, user-friendly shooting experience. They allow for quick target acquisition and improved accuracy, especially in low-light conditions. Red dots are suitable for both close-range and long-range shooting and are commonly used in concealed carry and self-defense situations.

1911 Compatibility

Ensure the red dot sight you choose is compatible with your 1911 pistol model. Different 1911 models have varying dimensions and mounting positions, so it is essential to select a sight that fits your specific pistol.

Reticle Types

Red dot sights often come with different types of reticles. The most common are the dot reticle, dot with crosshair, and crosshair reticle. Consider your shooting preferences and the intended use for your 1911 when choosing a suitable reticle.

https://preview.redd.it/aq4h27vxtx3d1.jpg?width=720&format=pjpg&auto=webp&s=172c2c367d39491eb935fb4f75ef5c1fcf0dc0ee

Brightness Control

Adjustable brightness control is an important feature for 1911 red dot sights, as it allows you to adjust the intensity of the red dot according to lighting conditions or personal preference. Some red dot sights have multiple brightness settings, while others have a single brightness setting or automatic brightness adjustment.

Battery Life

Battery life is another crucial factor to consider. Longer battery life ensures fewer interruptions during your shooting experience. Battery consumption may vary between red dot sights, so it is worth comparing the battery life of different models.

Considerations

https://preview.redd.it/rhhd11dytx3d1.jpg?width=720&format=pjpg&auto=webp&s=53f5cfdffe1a66bc956c744759ed9c4df044baf7

Price

1911 red dot sights come in various price ranges, so consider your budget when making a purchase. The cost of a sight may affect its build quality, features, and brand reputation. Determine your priorities and shop accordingly.

Durability

Investing in a durable red dot sight is essential, as it will be exposed to various harsh conditions while attached to your 1911 pistol. Look for sights with strong construction and weather-resistant materials to ensure durability.

Brand Reputation

Purchasing a red dot sight from a reputable brand can provide peace of mind and assurance about the product's quality. Research the brand's history, customer reviews, and warranty offerings before making a decision.

https://preview.redd.it/tqmrz4lytx3d1.jpg?width=720&format=pjpg&auto=webp&s=bdfd7097266010b7e85a7abeb43192cbb6844619

General Advice

When mounting a 1911 red dot sight, ensure proper co-witnessing with your iron sights. This allows you to have both a red dot and traditional sights visible, which can be beneficial in low-light conditions or when using the iron sights for backup.
Practice using your 1911 red dot sight regularly. Familiarize yourself with its features and functions to make the most of your purchase. Regular training can also help you improve your shooting skills.
Lastly, remember that the purpose of a red dot sight is to enhance your accuracy and improve your overall shooting experience. Choose a sight that best fits your needs, preferences, and intended use for your 1911 pistol.

FAQ

Understanding 1911 Red Dot

1911 Red Dots are sighting devices that are mounted on top of traditional 1911 firearms. These sights were developed to enhance accuracy and speed of target acquisition, making these firearms more effective in various shooting scenarios.

Which 1911 Red Dots are recommended for beginners?

For beginners, it's recommended to start with a 1911 Red Dot that offers good value for money. Some popular choices include the Holosun 507K, Trijicon MRO, and the Vortex SPARC II.

What is the difference between 1911 Red Dots and other sighting systems?

1911 Red Dots provide a wider field of view, which enhances target acquisition speed and allows for more accurate shots. These sights also co-witness with standard iron sights, making them suitable for a range of shooting scenarios.

How do I mount a 1911 Red Dot?

The process of mounting a 1911 Red Dot involves securing the sight to the firearm's slide. This is typically done either by having the slide milled with a direct optic cut or by using an adapter plate that matches the slide's existing cut. The specific installation process can vary depending on the sight and slide configuration, so it's recommended to refer to the manufacturer's guidelines or consult a professional for proper installation.

What are some factors to consider when choosing a 1911 Red Dot?

  • Brightness settings - Adjustable brightness settings for various lighting conditions are important for a smooth shooting experience.
  • Reticle shape and size - Different reticle shapes and sizes can impact the ease of use and target acquisition speed.
  • Battery life - Longer battery life ensures the sight doesn't turn off unexpectedly during shooting.
  • Price - While it's true that you get what you pay for, it's important to find a balance between quality and cost.
As an Amazon™ Associate, we earn from qualifying purchases.
submitted by ConsequenceSure3063 to u/ConsequenceSure3063 [link] [comments]


2024.06.01 12:12 Aquacat_1223 Advice on current position - 31m with partner graduating from nursing degree

Hi everyone, I just wanted some advice on my current position, to see how I can set myself up nicely in the next decade and how I'm doing at the minute.
I recently turned 31 and have always been a bit of a finance and spreadsheet nerd, so I've tracked my personal finances ever since I earned an income from my waitering job at 14.
I feel as though I'm in a reasonably good position, but wanted some advice on how to maximise my 30s to set myself up well going into my 40s. I don't know why, I just feel as though the next decade is quite a big one for setting my future self up well. My current position is below:
Salary: £75k (expect this to move to £80k in the next month or two)
Take home after tax, NI and company car: £3.9k
Another form of income brings in an average of £300
Total take home: £4.2k
My outgoings total about £4k/month give or take. My mortgage and bills come to £2k/month and then we spend an average of £2k/month on food and all activities/holiday funds. We're certainly not materialistic but do spend a fair amount on travelling to different places.
Current finances are:
House value £380k, mortgage just under £270k, so £110k equity
Workplace pension: £50k
Emergency fund spread across high interest savings accounts: £30k
Bitcoin: £40k
Vanguard account (mainly S&P500): £15k
Total net worth: £245k
The last few years have been difficult, with my income being the only source for the household. My girlfriend has just finished a nursing degree, which took 3 years, and is due to start a new role in August on a starting salary of £28k. I'm assuming this will bring in another £1.8k a month.
Based on that, with my salary and my partner's new source of income, we'd have about £2k/month spare, which I'm thinking of consistently adding into the Vanguard S&S ISA.
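As a quick sanity check, here is a minimal Python sketch that reproduces the sums above. Every figure is taken straight from the post; the £1.8k partner take-home is the poster's own estimate rather than a verified number.

```python
# Sanity check of the figures quoted above (all values in GBP).

# Net worth components
house_equity = 380_000 - 270_000  # house value minus the outstanding mortgage
pension = 50_000                  # workplace pension
emergency_fund = 30_000           # high interest savings accounts
bitcoin = 40_000
vanguard_isa = 15_000             # mainly S&P 500

net_worth = house_equity + pension + emergency_fund + bitcoin + vanguard_isa
print(f"Net worth: £{net_worth:,}")  # £245,000, matching the post

# Monthly cash flow once the partner's salary starts
my_take_home = 3_900 + 300        # salary take-home plus the ~£300 side income
partner_take_home = 1_800         # poster's estimate from a £28k starting salary
outgoings = 4_000                 # mortgage, bills, food, activities/holidays

surplus = my_take_home + partner_take_home - outgoings
print(f"Estimated monthly surplus: £{surplus:,}")  # roughly £2,000
```

Running it confirms the £245k net worth figure and the roughly £2k/month that could go into the ISA.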
My questions I'm hoping for some advice on are the following:
- Is investing all the surplus income (£2k/month) into an S&P 500/world fund the best plan? There are months we'll have more than this, but I'd say that's the average.
- I've been with my partner for 6 years and she's definitely the one. However, I bought the property myself 7 years ago, before we met, and she has only ever contributed to the food bills etc. I've always paid the full mortgage and bills. She has just over £5k to her name. Are we to just combine everything anyway once we tie the knot?
- Is my current position pretty good? I've worked hard throughout my 20s whilst still travelling a lot and having an overall great time. I'm unsure whether I'll want to retire early, but I certainly want full financial independence (well aware of the FIRE community and I read a lot of their posts). Is my plan through my 30s going to set us up well and make us FI if we stay consistent with the additional income?
Last point: no kids yet, and it will be a flip of a coin whether we have them eventually. If I was a betting man I'd probably say we will in about 5 years' time. I understand this would completely change our path, but we'll cross that bridge if it ever happens.
Appreciate your advice in advance and apologies for the essay.
submitted by Aquacat_1223 to UKPersonalFinance [link] [comments]


2024.06.01 11:59 YukiteruAmano92 There Will Be Scritches Pt.180

Previous Interlewd XLI Next First

---Sample---

---Fnurfar’s perspective---
---2710 Terran Calendar, 3 years BF---
All six of my paws desperately scramble against the slick pavement of the Prosperity back alley as I flee for my life!
Pursuing me… is a monster!
His species aren’t meant to be sprinters!
They said if it came to a chase, I just needed to quickly get out of his line of sight and keep going and he’d not be able to keep up!
I skid around a corner and steal a glance behind me, seeing two furious eyes moving towards me so fast that they seem to leave streaks of emerald green behind them as afterimages!
The Fury is so close on my tail that he’s almost certain to catch me now!
It would be laughable how much my… ‘employers’ had underestimated him if it weren’t so terrifying!
‘You’ve got a Terran with you! There should be no issue!’
Yes, that idiot mercenary they hired almost had me going with his smug, arrogant proclamation that ‘Big=slow! Slow=dead!’ as he idly showed off his little knife tricks!
My confidence lasted up until the very moment I saw the one we were supposed to rob!
[20cm] taller than the skinny mercenary and looking like he could easily weigh twice as much, the man was a Hunt damned beast compared to the one who was meant to protect me from him!
I think Flynn reassessed his cocksure attitude as well because, rather than waiting for me to have an opening like we agreed in the [fucking] plan, he just drew a knife and tried to stab the monster to death!
An extremely poorly calculated risk!
There was no competition!
This juggernaut dealt with Flynn as easily as Flynn could have dealt with me!
It took him a matter of seconds to dispatch my accomplice but that was a matter of seconds where he was distracted enough that I was able to snag what we had been after… not that it makes any difference now!
Just as I hear thundering footfalls coming up on my left, powerful fingers impact the space between the bottom of my neck and the top of my top shoulderblades.
I’m slammed into the ground… but not killed
I can feel the power contained in the iron grip around my neck…
I know that decapitating me would be as simple as deciding to close his fingers but, as I wait for death to come, it doesn’t…
Instead, the hand slides up my shoulders, gathering the loose skin and lifting me up like a kit in her parent’s mouth…
The first thing I’m able to see is the monster’s flat, booted feet, followed by a pair of long thick legs, then a chest and left arm covered in a loose fitting, buttoned shirt, patterned with vertical and horizontal lines.
The red fabric of his top disguises the bloodstain from the wound he got from Flynn, just below his shoulder. However, the nauseatingly metallic smell of it absolutely fills my nostrils!
The final thing to be revealed, as my feet hang more than [a metre] from the ground, is a face… the scarred skin a pale beige, the white, calcite teeth bared in a furious grimace, copper coloured eyebrows tilted downward in the middle over a nose, wrinkled with anger, and emerald eyes, burning with rage!
His shoulders rise and fall, in time with panted breaths he sucks in and out through his gritted teeth, putting me less in mind of a person (or even an animal) catching their breath after exertion and more in mind of some hulking piece of machinery from the Steam Age venting its pressure!
The Terran extends his pallid skinned, long fingered, furless, pentadactyl left hand to me, stained with the ferrous blood that’s run down his sleeve, and growls “Sample!”
“No…” I breathe, terrified.
“GIRL! I AIN’T fuckin’ PLAYIN’ with you!” he snarls, curling all but his index finger and jabbing it towards my snout “You’re gonna. GIVE. BACK. what you. FUCKIN’. STOLE!”
“You can take it from my corpse…” I defy, clutching my exhausted, trembling pawhands to the front of my jumpsuit.
Effortlessly, his free hand comes forward, batting my four aside, before pinching the top of the stasis vial and pulling it free, with there being absolutely nothing I can do to stop him!
He holds up the tube, in which is visible a small plant with a rosette of frilly black leaves and through which can be seen a frozen impression of the room it was in when it was stasised, demanding “You’re really willin’ to die for this!? For corporate espionage?!… Why the fuck’s this matter to you like that?!?!?!”
“I don’t care about the plant at all…” I answer, defeated.
His face twists in a sneer as he asks “Then why tell me I had to pry it from your cold. dead. hands!?”
“Because if I come back empty-handed they’re going to torture my husband and son and make me watch!… If I don’t come back at all… maybe they’ll let them go!” I pant in answer.
His face falls blank… but I can tell that is not because he’s no longer angry!
Instead, his redoubled rage has gone from white hot to ice cold as he leans in and demands “Who’s ‘they’?”
---2715 Terran Calendar, 2 years AF---
“One!?” demands the sceptical, lutrine, Nvar man, one of six listening to my story for the first time (along with the two friends who’ve heard it before), holding up a webbed pawhand and extending a single finger “You’re trying to tell us that one Terran dismantled the entire Giluspri Sisters’ Syndicate, overnight!?… Simply because you told him a sob story about them holding your family hostage!?”
“I did say you wouldn’t believe me(!)” I smirk, lifting my drink to take a sip.
“You’re damn right I don’t believe you!!!” he sneers “It might have been a little more believable if you’d made it a team of a dozen or so Terrans that were guarding this thing but one!?… There’s no way it took a single individual a single night to root out and entirely destroy an enterprise that Prosperity’s government had been hunting for nearly [2 decades], even if that individual was a Terran!”
I place my drink down on the table and turn the palms of all four pawhands to the ceiling as I say “Believe me or don’t… that’s exactly how it happened!”
“Hmmm… Don’t know ’bout ‘exactly’…!” comes a familiar voice from behind my head, in the next booth over.
I freeze and straighten my back.
The friends and audience in my booth are looking past me, curiously, but, from their faces, it doesn’t look like they can see anything.
I stand and slide out through the gap between the table and Nafnarl’s footpaws.
I turn right and am immediately able to see that the booth next to us is occupied by a mixture of Terrans and some much smaller humanoids with green skin.
I keep going, rounding the partition to reveal…
“By the Hunt! Victor?!” I exclaim, seeing the man sat with his back almost exactly to where I was sitting, next to another tall, slim humanoid with blue skin and four arms.
His copper hair is much longer, his face isn’t as scarred and isn’t wearing the disgusted sneer that characterised so much of the time he and I spent together but… there’s no mistaking it!
The man turns his head, smiling, before standing up to nearly twice my height and extending a palm to ruffle the fur between my ears, saying “How’s it goin’, Foxy? You look a lot better ’an you did last time I saw you at least(!)” gesturing with his other hand up and down my less skinny and less visibly scarred body.
“Never mind that, Victor! What are you doing here?! You didn’t tell me you were coming back to Prosperity!”
He smirks “Yeah, sorry Foxy… It’s a loose lips sink ships kinda deal… Just thought I’d show my friends here the bar you brought me to celebrate after everythin’ was done that time… Didn’t think I’d actually run into you here!”
I stare up at the man, agog, for a few moments before reaching up with both my left hands and closing them around his wrist.
He allows me to drag him back to the head of my table.
“Nafnarl! Gfurnaf! This is him! This is the one I’ve been telling you about for the last [5 years]!” I say to my two Graufna friends before turning to the rest of the table to declare “He’s the man who took down the Giluspris! He’s Victor ‘Cuddles’ Taylor!”
With mirthful bemusement, the Terran raises his left palm to the table to smile “Y’alright guys!” before his eyes scan the faces and his expression goes concerned. He turns to me and asks “Your hubby alright, Foxy?”
I bare my teeth (I hope friendlily) and answer “Fnarnulf’s fine, Victor!… Fuffarn too! This is just a girl’s night…” gesturing at my two friends “…or… it was(!)” gesturing over the four men and two women, of four different species, who joined us to hear my story.
“What did you mean by it not being ‘exactly’ right?” queries Lunvo, the same sceptical Nvar who voiced disbelief before, still looking sceptical (not that I can blame him) but at least impressed by the fact that the ‘con’ has an (imposing looking) Terran stooge now(!)
“Weeeeell…” Victor frowns down at me, mirthfully “…the way she described me dodgin’ that knife attack, she made me sound almost psychic(!)… In reality, she and this guy weren’t as smooth as she seems to think(!) The fact that I even got nicked by someone I was payin’ as much attention to as that is a bad reflection on my reaction time!… Also, she kinda made it sound like I went into their headquarters with a gun in one hand and a lit plasmasword in the other(!) As I recall, I gave ’em all a chance to surrender and come quietly and it were only after they, shall we say, indicated a lack of interest in that option that my weapons first cleared leather!… Oh! And what was with all that comparin’ the way I pant to ventin’ steam engines, Foxy(?!)”
“But…” starts Muan, a nervous tolypeutine Wne woman beside her Wno husband, Kmuw “…you don’t deny it was you and you alone who brought down the Giluspris?… Without help?”
The pale skin of the Terran’s flat face performs a complicated scrunch as he considers the question before answering “Don’t know ’bout ‘without help’… I had Foxy here for showin’ me the way, after I’d done a lotta convincin’… and, once I’d taken care of ’em, local law enforcers came to take the survivors away… Aaaaand… I probably didn’t actually manage to kill or capture every last one of ’em… just gutted its power structure enough that the rats fled the sinkin’ ship(!)”
“Why are you calling her ‘Foxy’?” asks Lunvo, four eyes narrowed in suspicion “‘Fnurfar’ is the name she gave us!”
The large man shrugs his shoulders “I didn’t get her name until we came here to celebrate… she didn’t trust me to give it… Had to call her somethin’, so I called her Foxy.”
“Hmmm…” responds Lunvo “…I’m not buying it…”
Victor raises an eyebrow “You ain’t buyin’ me givin’ her a nickname(!?)”
“I don’t believe any of it! The whole story reeks of the fanciful!”
I bare my teeth and slam my paws on the table before snarling “I don’t care if you question my honesty, Lunvo, but this man saved my husband’s life, my son’s life, the lives of dozens of others, freed me from effective slavery and freed this planet from its largest criminal syndicate! I will not have you questioning his integrity!”
Lunvo cowers away from me, despite the table separating us.
I feel a large, strong hand on my shoulder.
I turn to see a smiling face.
“Eeeeasy there, Foxy… ’Preciate the defence but there ain’t no need to get heated over it!… ’Specially not when there’s a really easy way to sort this out…” he looks up at Lunvo and asks “Lunvo, was it? Could I ask you to look up the front page of the Prosperity Chronicle from the 3rd of September, 2710?… I think you’ll see a picture of me shakin’ hands with your governor at the time…”
---
Previous Interlewd XLI Next First
Discord
Dramatis Personae
submitted by YukiteruAmano92 to HFY [link] [comments]


2024.06.01 11:58 jonnyjarko Weird problems with breaker

Gonna be as detailed as possible about this because it's a weird thing and is concerning. I live in a manufactured home (a trailer built in the 70's). A couple days ago half our power went out. By half, I mean everything in the outlets; the ceiling fans, lights, and the master bedroom's outlets stayed on. I figured I'd flip the related breaker switches, and it worked, albeit after a couple of tries. Soon after, it happened again and I had to flip the main one. However, when I turned it off and it was firmly in the off position, the outage reversed, and turning the main breaker switch back on stopped the flow of power. I found an outside dealy with a couple of copper fuses and another pull-in/out thing that I assume goes to the AC unit. After pulling out the flat fuse-looking thing and adjusting the copper fuses (ouch, btw), I got the power back on. However, it goes out nightly and I have to redo the reinsertion of the flat fuse dealy and adjust the copper fuses (I started lightly tapping them with my phone case). The process has been: I fiddle with the outdoor breaker thingy, flick the main breaker off and back on, and then the AC's breaker off and back on, and it works out. The main breaker still provides power in the off position and won't provide power to the rest of the house UNLESS I do the AC switch too. The AC switch alone does nothing.
I would appreciate any information I can get, so I can provide it to someone who can come look at it. I have exactly -$48 and am out of work due to injury, so I need to cut out diagnostic fees and all that, and make sure I don't get screwed by someone.
submitted by jonnyjarko to AskElectricians [link] [comments]


2024.06.01 11:57 Grandpah My beeper with battery beeps when I arm, then proceeds to beep every 5 seconds. But I have turned off settings. What gives? I have to arm the drone, lift it up, push the beeper button, then fly if I want to fly without constant beeping.

My beeper with battery beeps when I arm, then proceeds to beep every 5 seconds. But I have turned off settings. What gives? I have to arm the drone, lift it up, push the beeper button, then fly if I want to fly without constant beeping. submitted by Grandpah to fpv [link] [comments]


2024.06.01 11:49 SnooStrawberries6343 I'm unsure of who has to say this but...

If you are going to do touge, especially if you're new to it (and new means you've done it fewer than about 7 times):
- Don't try to drift. You'll say "oh, it's only 30 mph" until you say "it's only 30k to fix."
- Don't use your e-brake, and don't let your passengers either. Power slides are OK, but the e-brake is going to slide your rear end out far more than you believe.
- You don't have to take a corner fast to have fun. Taking a corner at 45 is going to be just as fun as taking it at 80. If you're getting bored of doing 45, take a break from touge, do some street driving for a month or two, and then go back to it.
- Know your limitations and know your car. "Oh, I saw it on Initial D" does not make you a professional, and "Oh, in Formula Drift they make it look easy" is not a valid excuse to your insurance company, nor to the media, when people see you rolled 8 times because you thought doing your first drift on a mountain pass was a smart idea. No one can be too professional, and this stuff still happens even in professional racing: I was watching Formula Drift about a month back and a guy in a BMW had his wheels turned, but his front wheels lost traction, and instead of sliding around the corner he just slid straight sideways. So if you're going to attempt a drift, be 90% confident that's what you want to attempt and accept the risks; think it over like reading terms and conditions. Don't be 100% confident though, that's too confident.
submitted by SnooStrawberries6343 to Touge [link] [comments]


2024.06.01 11:30 mcm8279 [Opinion] SCREENRANT: "Star Trek Is Better Because Of 11 Lower Decks Canon Additions" (like a Bonsai tree for breakfast, everything tasting like black licorice, and an endless supply of steaming hot bananas.)

"These are the messy, broken pieces that inevitably exist in Star Trek, not just on the ships, not just on the worlds the USS Cerritos checks up on, but within the characters themselves. Star Trek: Lower Decks makes Star Trek better because it shows us characters that are real and relatable , with problems we can identify with, and additions that keep enriching Star Trek canon."
Star Trek Is Better Because Lower Decks introduced:
"Replicator Malfunctions; Hysperia, The RenFaire Planet; Ferengi Television; Badgey; AGIMUS; The Dog, Created By Tendi; Moopsy; The Smiling Koala; Orion Culture; Starfleet's California Class Starships; The USS Cerritos Crew)."
Jen Watson (ScreenRant)
Link:
https://screenrant.com/star-trek-lower-decks-canon-best-additions/
Quotes:
"Naturally, Star Trek: Lower Decks thrives on callbacks to some of the weirdest and most beloved parts of earlier Star Trek shows, but there's no need to have an encyclopedic knowledge of all things Star Trek in order to enjoy Star Trek: Lower Decks. Instead of relying on all the Star Trek shows that preceded it, Lower Decks references itself as the seasons go on. Best of all, Star Trek: Lower Decks enriches Star Trek canon by introducing new elements that just didn't exist before, from minutiae that "upper decks" shows might not concern themselves with, to brand-new characters and creatures we all want plush versions of.
Replicator Malfunctions
What is life like for a junior officer on a starship? How do they sleep, shower, and eat? Well, it turns out that the lower deckers sometimes have to deal with what happens when the USS Cerritos' food replicators get a little ... touchy. Unlike tragic transporter malfunctions, which are the basis of many an introspective morality play on Star Trek, the not-so-tragic replicator malfunctions on Star Trek: Lower Decks are a little more comedic in scope.
If there's one true thing about advanced technology, it's that it's going to break at some point. Brad Boimler seems to get stuck with the worst of Star Trek: Lower Decks' replicator malfunctions, like a Bonsai tree for breakfast, everything tasting like black licorice, and an endless supply of steaming hot bananas. And when replicator credits are a thing, you can't just keep asking for new stuff. You get "banana: hot," and you deal with it.
[...]
Orion Culture
Star Trek: Lower Decks seeks to balance out the sexist tropes from Star Trek: The Original Series and the attempts to flip the narrative about Orion women in Star Trek: Enterprise by featuring Star Trek's first Orion main character: Science Officer in training Lt. D'Vana Tendi. Tendi showcases Orion culture from her own perspective, calling out Beckett Mariner for subscribing to harmful stereotypes, while also illustrating the true things that perpetuate the ideas of Orions being pirates and assassins.
In Star Trek: Lower Decks season 4, episode 4, "Something Borrowed, Something Green", Star Trek's first visit to the Orion homeworld offers glimpses into the aspects of Orion culture that explain persistent stereotypes and why Tendi is sensitive to them. Danger and deception are a matter of course for Orions, with aspects of piracy playing a part in everything from Orion wedding traditions to competitive games. The Orion culture seen in Star Trek: Lower Decks is surprisingly nuanced, and actually manages to reconcile seemingly conflicting ideas from earlier Star Trek shows.
[...]
The USS Cerritos Crew
By far, the best additions that Star Trek: Lower Decks makes to Star Trek canon are its characters. Star Trek: Lower Decks would be nothing without the depth and realism of the character-driven stories that lie at the heart of every single episode. Each of Star Trek: Lower Decks' main characters has their own fatal flaw, their own way of getting in their own way, so to speak, that proves relatable to those of us who are ambitious perfectionists, subject to self-sabotage, hiding who we are, unsure how to use our talents, or trying to find our place -- no matter how old we are. At the end of the day, Star Trek: Lower Decks is about the Warp Core Five's friendships making them stronger, better people.
By offering in-depth looks at the lives of junior officers, Star Trek: Lower Decks shows the depth of life aboard a Starfleet ship from the ground up. These are the parts of Starfleet that the upper-decker heroes of other Star Trek shows aren't privy to, or left behind long ago. These are the messy, broken pieces that inevitably exist in Star Trek, not just on the ships, not just on the worlds the USS Cerritos checks up on, but within the characters themselves. Star Trek: Lower Decks makes Star Trek better because it shows us characters that are real and relatable, with problems we can identify with, and additions that keep enriching Star Trek canon."
Jen Watson (ScreenRant)
Link:
https://screenrant.com/star-trek-lower-decks-canon-best-additions/
submitted by mcm8279 to trektalk [link] [comments]

