import gradio as gr
import tensorflow as tf
import numpy as np
import os
import PIL
import PIL.Image

# Create a Gradio App using Blocks    
with gr.Blocks() as demo:
    gr.Markdown(
    """
    # AI/ML Playground
    """
    )
    with gr.Accordion("Click for Instructions:"):
        gr.Markdown(
    """
    * Uploading an image will set up, train, and evaluate the base model, then display its prediction for the image.
    """)

    # Train, evaluate, and test an ML image
    # classification model for clothing images,
    # then classify the uploaded image
    def modelTraining(img):
        class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
               'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
        
        # Fashion-MNIST clothing dataset (28x28 grayscale
        # images, 10 classes matching class_names above)
        fashion_mnist = tf.keras.datasets.fashion_mnist

        # load the train/test split and scale the pixel
        # values from [0, 255] down to [0, 1]
        (x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
        x_train, x_test = x_train / 255.0, x_test / 255.0
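        # x_train holds 60,000 28x28 training images and x_test 10,000
        # test images, each paired with an integer label from 0 to 9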

        # create the neural net layers
        model = tf.keras.models.Sequential([
          tf.keras.layers.Flatten(input_shape=(28, 28)),
          tf.keras.layers.Dense(128, activation='relu'),
          tf.keras.layers.Dropout(0.2),
          tf.keras.layers.Dense(10)
        ])
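        # Flatten turns each 28x28 image into a 784-element vector, the
        # 128-unit ReLU layer learns intermediate features, Dropout(0.2)
        # helps reduce overfitting, and Dense(10) emits one logit per class.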

        # make a prediction with the freshly built (still
        # untrained) model on the first training example
        predictions = model(x_train[:1]).numpy()

        # converts the raw logits into class probabilities
        # (for inspection only; the result is not stored)
        tf.nn.softmax(predictions).numpy()

        # define the loss function and compute the loss
        # for the untrained model's prediction
        loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
        loss_fn(y_train[:1], predictions).numpy()
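        # For an untrained model each of the 10 classes is roughly equally
        # likely, so this initial loss should be close to -ln(1/10) ≈ 2.3.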

        # compile the model with the loss function
        model.compile(optimizer='adam',
                      loss=loss_fn,
                      metrics=['accuracy'])
        
        # train the model for 5 epochs, then
        # evaluate it on the held-out test set
        model.fit(x_train, y_train, epochs=5)
        test_loss, test_acc = model.evaluate(x_test,  y_test, verbose=2)
        post_train_results = f"Test accuracy: {test_acc} Test Loss: {test_loss}"
        print(post_train_results)
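        # With this small dense network, Fashion-MNIST test accuracy after
        # 5 epochs typically lands in the high 80s (percent), though the
        # exact figure varies between runs.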

        # wrap the trained model with a Softmax layer so the final,
        # production-ready model outputs class probabilities directly
        probability_model = tf.keras.Sequential([model, tf.keras.layers.Softmax()])

        # Input image pre-processing before submission to the model.
        # Gradio delivers the upload as an RGB numpy array of shape (H, W, 3);
        # convert it to 28x28 grayscale and normalize to [0, 1] so it matches
        # the training data.
        print(f"Input image shape: {img.shape}  Dimensions: {img.ndim}")
        pil_img = PIL.Image.fromarray(img).convert("L").resize((28, 28))
        resized_array = np.array(pil_img) / 255.0
        input_array = np.expand_dims(resized_array, axis=0)  # add a batch dimension

        # Make a prediction using the model
        prediction = probability_model.predict(input_array)
        predicted_label = class_names[np.argmax(prediction)]
        # Postprocess the prediction and return it
        return predicted_label
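        # A possible extension (sketch, not the original behaviour): return
        # the confidence of the top class alongside the label, e.g.
        #     return f"{predicted_label} ({np.max(prediction):.1%})"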
        

    # Creates the Gradio interface objects
    with gr.Row():
        with gr.Column(scale=2):
            image_data = gr.Image(label="Upload Image", type="numpy")
        with gr.Column(scale=1):
            model_prediction = gr.Text(label="Model Prediction", interactive=False)
    image_data.change(modelTraining, image_data, model_prediction)
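    # Note: the .change() event retrains the model on every new upload. A
    # sketch of an alternative (not part of this app) would trigger the run
    # from an explicit button instead:
    #     run_button = gr.Button("Train and Predict")
    #     run_button.click(modelTraining, image_data, model_prediction)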
    
    
# creates a local web server;
# with share=True, Gradio also generates a
# temporary public link to the demo
demo.launch(share=False)
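
# Usage sketch (assumes this file is saved as app.py and that gradio,
# tensorflow, pillow, and numpy are installed):
#     python app.py
# then open the local URL Gradio prints (http://127.0.0.1:7860 by default).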