Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Labs/API_Labs/FLASK_GCP_LAB/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
*.pkl
42 changes: 42 additions & 0 deletions Labs/API_Labs/FLASK_GCP_LAB/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
from flask import Flask, request, jsonify
from predict import predict_iris
import os

# Flask application instance for the prediction service.
app = Flask(__name__)

# Map numeric model output to human-readable class
label_map = {
    0: "setosa",
    1: "versicolor",
    2: "virginica"
}

@app.route('/predict', methods=['POST'])
def predict():
    """Predict the iris species for one sample sent as JSON.

    Expects a JSON body with numeric fields ``sepal_length``,
    ``sepal_width``, ``petal_length`` and ``petal_width``. Responds with
    ``{"prediction": "<species>"}`` on success, or a 400 error with a
    message when the body is missing, a key is absent, or a value is not
    numeric (previously these raised and surfaced as a 500).
    """
    # silent=True returns None instead of raising on a non-JSON body.
    data = request.get_json(silent=True)
    if data is None:
        return jsonify({'error': 'Request body must be JSON'}), 400

    try:
        sepal_length = float(data['sepal_length'])
        sepal_width = float(data['sepal_width'])
        petal_length = float(data['petal_length'])
        petal_width = float(data['petal_width'])
    except (KeyError, TypeError, ValueError) as exc:
        # Missing key or non-numeric value is a client error, not a 500.
        return jsonify({'error': f'Invalid input: {exc}'}), 400

    # call model
    prediction = predict_iris(sepal_length, sepal_width, petal_length, petal_width)

    # convert numeric class → label string for frontend; fall back to the
    # raw value if the model returns something unexpected.
    try:
        pred_label = label_map.get(int(prediction), str(prediction))
    except (TypeError, ValueError):
        pred_label = str(prediction)

    return jsonify({'prediction': pred_label})

if __name__ == '__main__':
    # debug=True exposes the Werkzeug interactive debugger (arbitrary code
    # execution) and here the server binds 0.0.0.0 -- so only enable debug
    # when explicitly requested via the FLASK_DEBUG environment variable.
    app.run(
        debug=os.environ.get("FLASK_DEBUG", "").lower() in ("1", "true"),
        host="0.0.0.0",
        port=int(os.environ.get("PORT", 8080))
    )
Binary file modified Labs/API_Labs/FLASK_GCP_LAB/model/model.pkl
Binary file not shown.
19 changes: 19 additions & 0 deletions Labs/API_Labs/FLASK_GCP_LAB/predict.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import numpy as np
import joblib
import os
from train import run_training

# Path to the persisted model. Loaded lazily (below) so that merely
# importing this module does not crash when the pickle has not been
# trained yet -- the previous eager joblib.load at import time also made
# the train-if-missing fallback in __main__ unreachable.
MODEL_PATH = "model/model.pkl"
_model = None


def _get_model():
    """Load the trained model from MODEL_PATH once and cache it."""
    global _model
    if _model is None:
        _model = joblib.load(MODEL_PATH)
    return _model


def predict_iris(sepal_length, sepal_width, petal_length, petal_width):
    """Return the model's predicted class for one iris sample.

    Parameters are the four flower measurements (floats, in cm). Returns
    the first element of the model's prediction -- an integer class index
    for the bundled classifier.
    """
    input_data = np.array([[sepal_length, sepal_width, petal_length, petal_width]])
    return _get_model().predict(input_data)[0]

if __name__ == "__main__":
    # Smoke check when run directly: report whether a persisted model
    # exists; otherwise create the model/ directory and train one.
    if os.path.exists("model/model.pkl"):
        print("Model loaded successfully")
    else:
        # NOTE(review): this branch assumes importing this module succeeds
        # even when model/model.pkl is absent -- verify the model is not
        # loaded eagerly at import time, or this code can never run.
        os.makedirs("model", exist_ok=True)
        run_training()
31 changes: 26 additions & 5 deletions Labs/API_Labs/FLASK_GCP_LAB/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,39 @@

# Flask application instance for the prediction service.
app = Flask(__name__)

# Map numeric model output to human-readable class
label_map = {
    0: "setosa",
    1: "versicolor",
    2: "virginica"
}

@app.route('/predict', methods=['POST'])
def predict():
    """Predict the iris species for one sample sent as JSON.

    Expects a JSON body with numeric fields ``sepal_length``,
    ``sepal_width``, ``petal_length`` and ``petal_width``. Responds with
    ``{"prediction": "<species>"}`` on success, or a 400 error when the
    body is missing, a key is absent, or a value is not numeric.
    """
    # silent=True returns None instead of raising on a non-JSON body.
    data = request.get_json(silent=True)
    if data is None:
        return jsonify({'error': 'Request body must be JSON'}), 400

    try:
        sepal_length = float(data['sepal_length'])
        sepal_width = float(data['sepal_width'])
        petal_length = float(data['petal_length'])
        petal_width = float(data['petal_width'])
    except (KeyError, TypeError, ValueError) as exc:
        # Missing key or non-numeric value is a client error, not a 500.
        return jsonify({'error': f'Invalid input: {exc}'}), 400

    # call model
    prediction = predict_iris(sepal_length, sepal_width, petal_length, petal_width)

    # convert numeric class → label string for frontend; fall back to the
    # raw value if the model returns something unexpected.
    try:
        pred_label = label_map.get(int(prediction), str(prediction))
    except (TypeError, ValueError):
        pred_label = str(prediction)

    return jsonify({'prediction': pred_label})

if __name__ == '__main__':
    # debug=True exposes the Werkzeug interactive debugger (arbitrary code
    # execution) and the server binds 0.0.0.0 -- so only enable debug when
    # explicitly requested via the FLASK_DEBUG environment variable.
    app.run(
        debug=os.environ.get("FLASK_DEBUG", "").lower() in ("1", "true"),
        host="0.0.0.0",
        port=int(os.environ.get("PORT", 8080))
    )
Binary file added Labs/API_Labs/FLASK_GCP_LAB/src/model/model.pkl
Binary file not shown.
23 changes: 17 additions & 6 deletions Labs/API_Labs/FLASK_GCP_LAB/src/test_api.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,29 @@
"""Smoke-test the locally running /predict endpoint with one sample."""
import requests
import json

url = 'http://127.0.0.1:8080/predict'

# One hard-coded iris sample (typical setosa measurements, in cm).
payload = {
    'sepal_length': 5.1,
    'sepal_width': 3.5,
    'petal_length': 1.4,
    'petal_width': 0.2
}

headers = {
    'Content-Type': 'application/json'
}

response = requests.post(url, data=json.dumps(payload), headers=headers)

print("Status:", response.status_code)
print("Body:", response.text)

if response.status_code == 200:
    try:
        prediction = response.json()['prediction']
        print('Predicted species:', prediction)
    except Exception as e:
        # Server answered 200 but with a non-JSON / unexpected body.
        print("Could not parse JSON:", e)
else:
    print('Error:', response.status_code)
52 changes: 29 additions & 23 deletions Labs/API_Labs/FLASK_GCP_LAB/src/train.py
Original file line number Diff line number Diff line change
@@ -1,34 +1,40 @@
import joblib
import os
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score


def run_training():
    """Train an iris classifier and persist it to model/model.pkl.

    Loads scikit-learn's bundled iris dataset, fits a random forest,
    prints a hold-out accuracy as a sanity check, and saves the fitted
    model with joblib so the Flask API can load it at serving time.
    """
    # 1. Load dataset (Iris)
    iris = load_iris()
    X = iris.data    # shape (150, 4)
    y = iris.target  # 0,1,2 classes

    # 2. Train / test split (for sanity); stratify keeps class balance.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=42, stratify=y
    )

    # 3. Train a model (fixed seed for reproducible artifacts).
    model = RandomForestClassifier(
        n_estimators=100,
        random_state=42
    )
    model.fit(X_train, y_train)

    # 4. Quick eval just so we know it learned
    y_pred = model.predict(X_test)
    acc = accuracy_score(y_test, y_pred)
    print(f"Model accuracy: {acc:.3f}")

    # 5. Make sure model/ directory exists
    os.makedirs("model", exist_ok=True)

    # 6. Save model to model/model.pkl
    joblib.dump(model, "model/model.pkl")
    print("Saved trained model to model/model.pkl")


if __name__ == "__main__":
    run_training()
128 changes: 103 additions & 25 deletions Labs/API_Labs/FLASK_GCP_LAB/streamlit_app.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,105 @@
"""Streamlit front-end for the iris Flask API.

Offers a single-sample form and a CSV batch mode; both POST to the
/predict endpoint and display the returned species label.
"""
import streamlit as st
import pandas as pd
import requests
import os
import json
from io import BytesIO

st.set_page_config(page_title="Iris Species Predictor", page_icon="🌸", layout="centered")
st.title("🌸 Iris Flower Species Prediction App")

# The JSON body is serialized manually, so the content-type header is set
# explicitly rather than relying on requests' json= shortcut.
PREDICT_URL = "http://127.0.0.1:8080/predict"  # your local Flask API
headers = {"Content-Type": "application/json"}

st.markdown(
    """
    This app connects to a Flask API that predicts the **Iris flower species**
    based on sepal and petal dimensions.
    You can try single predictions or upload a CSV for batch results.
    """
)

option = st.sidebar.radio("Select Mode:", ["Single Prediction", "Batch Prediction (CSV)"])

if option == "Single Prediction":
    st.subheader("🔹 Enter Flower Measurements")

    sepal_length = st.number_input("Sepal Length (cm)", min_value=0.0, max_value=10.0, step=0.1)
    sepal_width = st.number_input("Sepal Width (cm)", min_value=0.0, max_value=10.0, step=0.1)
    petal_length = st.number_input("Petal Length (cm)", min_value=0.0, max_value=10.0, step=0.1)
    petal_width = st.number_input("Petal Width (cm)", min_value=0.0, max_value=10.0, step=0.1)

    if st.button("Predict"):
        payload = {
            "sepal_length": sepal_length,
            "sepal_width": sepal_width,
            "petal_length": petal_length,
            "petal_width": petal_width,
        }

        try:
            response = requests.post(PREDICT_URL, data=json.dumps(payload), headers=headers)

            if response.status_code == 200:
                prediction = response.json()["prediction"]
                st.success(f"🌼 Predicted species: **{prediction}**")
            else:
                st.error(f"❌ API Error: Status code {response.status_code}")

        except requests.exceptions.RequestException as e:
            st.error(f"⚠️ Connection error: {str(e)}")

else:
    st.subheader("📂 Upload CSV for Batch Prediction")

    st.markdown(
        """
        **Required columns:**
        `sepal_length`, `sepal_width`, `petal_length`, `petal_width`
        """
    )

    uploaded_file = st.file_uploader("Choose a CSV file", type=["csv"])

    if uploaded_file is not None:
        df = pd.read_csv(uploaded_file)
        st.write("✅ Uploaded data preview:")
        st.dataframe(df.head())

        if st.button("Run Batch Predictions"):
            predictions = []
            # NOTE(review): one HTTP request per row -- acceptable for small
            # CSVs; a batch endpoint would be faster for large files.
            for _, row in df.iterrows():
                # Cast to plain floats: pandas rows hold numpy scalars,
                # which json.dumps cannot serialize ("Object of type
                # float64 is not JSON serializable").
                payload = {
                    "sepal_length": float(row["sepal_length"]),
                    "sepal_width": float(row["sepal_width"]),
                    "petal_length": float(row["petal_length"]),
                    "petal_width": float(row["petal_width"]),
                }

                try:
                    resp = requests.post(PREDICT_URL, data=json.dumps(payload), headers=headers)
                    if resp.status_code == 200:
                        pred = resp.json().get("prediction", "Error")
                    else:
                        pred = f"Error {resp.status_code}"
                except Exception as e:
                    # Record the failure for this row and keep going.
                    pred = f"Error: {str(e)}"

                predictions.append(pred)

            df["prediction"] = predictions
            st.success("🎉 Batch prediction completed!")
            st.dataframe(df)

            # Download results as CSV
            output = BytesIO()
            df.to_csv(output, index=False)
            output.seek(0)
            st.download_button(
                label="⬇️ Download Predictions as CSV",
                data=output,
                file_name="iris_predictions.csv",
                mime="text/csv",
            )

st.markdown("---")
st.caption("Developed by Shivie Saksenaa | MLOps Flask + Streamlit Lab")
29 changes: 29 additions & 0 deletions Labs/API_Labs/FLASK_GCP_LAB/test_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
"""Smoke-test the locally running /predict endpoint with one sample."""
import requests
import json

# Target endpoint of the locally running Flask API.
url = 'http://127.0.0.1:8080/predict'

# One hard-coded iris sample (typical setosa measurements, in cm).
payload = {
    'sepal_length': 5.1,
    'sepal_width': 3.5,
    'petal_length': 1.4,
    'petal_width': 0.2
}

headers = {
    'Content-Type': 'application/json'
}

resp = requests.post(url, data=json.dumps(payload), headers=headers)

print("Status:", resp.status_code)
print("Body:", resp.text)

if resp.status_code != 200:
    print('Error:', resp.status_code)
else:
    try:
        print('Predicted species:', resp.json()['prediction'])
    except Exception as e:
        # 200 response but non-JSON / unexpected body.
        print("Could not parse JSON:", e)
Loading