import pandas as pd
import numpy as np
import joblib
import gradio as gr
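
# Gradio app serving a customer churn prediction model: each submission is run
# through the fitted encoders and scaler saved at training time, then passed to
# the logistic regression classifier loaded below.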

# Load the preprocessing steps and the model
label_encoders = joblib.load('label_encoders.pkl')
one_hot_encoder = joblib.load('one_hot_encoder.pkl')
min_max_scaler = joblib.load('min_max_scaler.pkl')
model = joblib.load('logistic_regression_model.pkl')
le_target = joblib.load('label_encoder_target.pkl')
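# Note: these .pkl artifacts are assumed to have been fitted by the training
# pipeline on the same columns (and in the same order) used in preprocess_data below.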


def preprocess_data(data):
    """Apply the saved encoders and scaler to a single record and return the model-ready feature row."""
    df = pd.DataFrame([data])

    label_encode_cols = ["Partner", "Dependents", "PhoneService", "PaperlessBilling", "gender"]
    one_hot_encode_cols = ["MultipleLines", "InternetService", "OnlineSecurity", "OnlineBackup",
                           "DeviceProtection", "TechSupport", "StreamingTV", "StreamingMovies",
                           "Contract", "PaymentMethod"]
    min_max_scale_cols = ["tenure", "MonthlyCharges", "TotalCharges"]

    # Strip leading and trailing spaces from string inputs
    for col in label_encode_cols + one_hot_encode_cols:
        df[col] = df[col].str.strip()

    # Coerce the numeric columns to float, treating blank strings as missing.
    # Note: with a single input row the column mean is NaN for any missing value,
    # so this fill only takes effect when the value was actually provided.
    df[min_max_scale_cols] = df[min_max_scale_cols].replace(' ', np.nan).astype(float)
    df[min_max_scale_cols] = df[min_max_scale_cols].fillna(df[min_max_scale_cols].mean())

    # Label encode specified columns
    for col in label_encode_cols:
        le = label_encoders[col]
        df[col] = le.transform(df[col])

    # One-hot encode specified columns
    one_hot_encoded = one_hot_encoder.transform(df[one_hot_encode_cols])
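    # np.hstack below needs a dense array; if the encoder was saved with sparse
    # output (the default in some scikit-learn versions), convert it here
    if hasattr(one_hot_encoded, "toarray"):
        one_hot_encoded = one_hot_encoded.toarray()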

    # Min-max scale specified columns
    scaled_numerical = min_max_scaler.transform(df[min_max_scale_cols])

    # Combine the processed feature groups into a single feature array
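    # (the ordering label-encoded, scaled numeric, one-hot is assumed to match the
    # column order used at training time; SeniorCitizen is not included here, which
    # is assumed to mirror the training features)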
    X_processed = np.hstack((df[label_encode_cols].values, scaled_numerical, one_hot_encoded))

    return X_processed

def predict(gender, senior_citizen, partner, dependents, tenure, phone_service, multiple_lines, internet_service,
            online_security, online_backup, device_protection, tech_support, streaming_tv, streaming_movies,
            contract, paperless_billing, payment_method, monthly_charges, total_charges):
    """Collect the form inputs, preprocess them, and return the churn prediction label."""
    data = {
        "gender": gender,
        "SeniorCitizen": senior_citizen,
        "Partner": partner,
        "Dependents": dependents,
        "tenure": tenure,
        "PhoneService": phone_service,
        "MultipleLines": multiple_lines,
        "InternetService": internet_service,
        "OnlineSecurity": online_security,
        "OnlineBackup": online_backup,
        "DeviceProtection": device_protection,
        "TechSupport": tech_support,
        "StreamingTV": streaming_tv,
        "StreamingMovies": streaming_movies,
        "Contract": contract,
        "PaperlessBilling": paperless_billing,
        "PaymentMethod": payment_method,
        "MonthlyCharges": monthly_charges,
        "TotalCharges": total_charges
    }

    try:
        X_new = preprocess_data(data)
        prediction = model.predict(X_new)
        prediction = le_target.inverse_transform(prediction)
        return "Churn" if prediction[0] == 'Yes' else "No Churn"
    except Exception as e:
        # Log the failure and surface the message in the UI rather than crashing the app
        print("Error during prediction:", e)
        return str(e)

# Define the Gradio interface
inputs = [
    gr.Radio(label="Gender", choices=["Female", "Male"]),
    gr.Number(label="Senior Citizen (0 or 1)"),
    gr.Radio(label="Partner", choices=["Yes", "No"]),
    gr.Radio(label="Dependents", choices=["Yes", "No"]),
    gr.Number(label="Tenure (integer)"),
    gr.Radio(label="Phone Service", choices=["Yes", "No"]),
    gr.Radio(label="Multiple Lines", choices=["Yes", "No", "No phone service"]),
    gr.Radio(label="Internet Service", choices=["DSL", "Fiber optic", "No"]),
    gr.Radio(label="Online Security", choices=["Yes", "No", "No internet service"]),
    gr.Radio(label="Online Backup", choices=["Yes", "No", "No internet service"]),
    gr.Radio(label="Device Protection", choices=["Yes", "No", "No internet service"]),
    gr.Radio(label="Tech Support", choices=["Yes", "No", "No internet service"]),
    gr.Radio(label="Streaming TV", choices=["Yes", "No", "No internet service"]),
    gr.Radio(label="Streaming Movies", choices=["Yes", "No", "No internet service"]),
    gr.Radio(label="Contract", choices=["Month-to-month", "One year", "Two year"]),
    gr.Radio(label="Paperless Billing", choices=["Yes", "No"]),
    gr.Radio(label="Payment Method", choices=["Electronic check", "Mailed check", "Bank transfer (automatic)", "Credit card (automatic)"]),
    gr.Number(label="Monthly Charges (float)"),
    gr.Number(label="Total Charges (float)")
]

outputs = gr.Textbox(label="Prediction")

# Create the Gradio interface
gr.Interface(fn=predict, inputs=inputs, outputs=outputs, title="Churn Prediction Model").launch()
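
# For a temporary public URL while testing, launch(share=True) can be used instead.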