diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..b736ca36
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,17 @@
+# syntax=docker/dockerfile:1
+
+FROM python:3.11
+
+WORKDIR /code
+
+COPY requirements.txt .
+
+RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+COPY . .
+
+RUN chmod -R 755 combined_demo/input-imgs/ combined_demo/output-imgs/
+
+EXPOSE 3100
+
+CMD ["gunicorn", "main:app"]
diff --git a/color_analysis/color_analysis_model.py b/color_analysis/color_analysis_model.py
index 42711769..407f582d 100644
--- a/color_analysis/color_analysis_model.py
+++ b/color_analysis/color_analysis_model.py
@@ -135,7 +135,6 @@ def load_pretrained_model(model, save_path='color_analysis/trained_model.pth', d
     if os.path.exists(save_path):
         model.load_state_dict(torch.load(save_path, map_location=device, weights_only=True))
         model.eval()
-        print(f"Loaded pretrained model from {save_path}")
     else:
         print("No pretrained model found. Training from scratch.")
     return model
@@ -163,7 +162,6 @@ def main():
     ])
 
     # Create training and testing sets
-    print("Loading and splitting data...")
     train_set, test_set, train_labels, test_labels, class_labels = load_and_split_data(data_dir)
     train_dataset = ColorDataset(train_set, train_labels, transform=transform)
     test_dataset = ColorDataset(test_set, test_labels, transform=transform)
@@ -182,12 +180,10 @@ def main():
     print("Training the model...")
     train_model(model, train_loader, criterion, optimizer, num_epochs=num_epochs, device=device, save_path=save_path)
 
-    print("Testing the model...")
     test_loss, test_accuracy = test_model(model, test_loader, device=device)
     print(f"Test Loss: {test_loss:.4f}, Test Accuracy: {test_accuracy:.2f}%")
 
     # Predict the season of the input image with the trained model
-    print(f"Predicting season for image: {args.image_path}")
     predicted_season = predict_image(model, args.image_path, class_labels, transform, device=device)
     if predicted_season:
         print(f"Predicted Season: {predicted_season}")
diff --git a/face_detect/facedec/facedetect.py b/face_detect/facedec/facedetect.py
index 9e032c78..616a7e57 100644
--- a/face_detect/facedec/facedetect.py
+++ b/face_detect/facedec/facedetect.py
@@ -3,9 +3,9 @@
 from typing import Tuple, Union
 
 import cv2
+import imutils
 import numpy as np
 from PIL import Image
-import imutils
 
 MARGIN = 10  # pixels
 ROW_SIZE = 10  # pixels
@@ -71,6 +71,8 @@ def visualize(image, detection_result) -> np.ndarray:
 
     return annotated_image, cropped
 
+import os
+
 # STEP 1: Import the necessary modules.
 import mediapipe as mp
 from mediapipe.tasks import python
@@ -96,14 +98,22 @@ def visualize(image, detection_result) -> np.ndarray:
 image_copy = np.copy(image.numpy_view())
 annotated_image, cropped = visualize(image_copy, detection_result)
 cropped = imutils.resize(cropped, width=800)
+cropped_pil = Image.fromarray(cv2.cvtColor(cropped, cv2.COLOR_BGR2RGB))
 img = cv2.imread(IMAGE_FILE)
 rgb_annotated_image = cv2.cvtColor(annotated_image, cv2.COLOR_BGR2RGB)
 rgb_annotated_image = imutils.resize(rgb_annotated_image, width=800)
+rgb_annotated_image_pil = Image.fromarray(cv2.cvtColor(rgb_annotated_image, cv2.COLOR_BGR2RGB))
 
 # SAVE CROPPED AS IMAGE:
 cv2.imwrite("cropped.jpg", cropped)
-
-cv2.imwrite("combined_demo/output-imgs/redbox.jpg", rgb_annotated_image)
-cv2.imwrite("combined_demo/output-imgs/cropped.jpg", cropped)
+# switch to os for saving images
+out_dir = "combined_demo/output-imgs"
+if not os.path.exists(out_dir):
+    os.mkdir(out_dir)
+rgb_annotated_image_pil.save(os.path.join(out_dir, "redbox.jpg"))
+cropped_pil.save(os.path.join(out_dir, "cropped.jpg"))
+
+# cv2.imwrite("combined_demo/output-imgs/redbox.jpg", rgb_annotated_image)
+# cv2.imwrite("combined_demo/output-imgs/cropped.jpg", cropped)
 # cv2.imshow("Gotchaface", rgb_annotated_image)
 # cv2.waitKey(0)
diff --git a/gunicorn.conf.py b/gunicorn.conf.py
new file mode 100644
index 00000000..3b5188cd
--- /dev/null
+++ b/gunicorn.conf.py
@@ -0,0 +1,12 @@
+# Gunicorn configuration file
+import multiprocessing
+
+max_requests = 1000
+max_requests_jitter = 50
+
+log_file = "-"
+
+bind = "0.0.0.0:3100"
+
+worker_class = "uvicorn.workers.UvicornWorker"
+workers = (multiprocessing.cpu_count() * 2) + 1
diff --git a/requirements.txt b/requirements.txt
index 0af32207..356295ce 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,8 +17,4 @@ uvicorn[standard]
 sqlmodel
 gunicorn
 SQLAlchemy
-pydantic
-sklearn
-argparse
-PIL
-os
\ No newline at end of file
+pydantic
\ No newline at end of file
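
Note on how the new pieces fit together: Gunicorn reads a gunicorn.conf.py found in the working directory by default, so the Dockerfile's CMD ["gunicorn", "main:app"] picks up the 0.0.0.0:3100 bind address (matching EXPOSE 3100), the (2 * CPUs) + 1 worker count, and the uvicorn.workers.UvicornWorker worker class, which only serves ASGI applications. main.py is not part of this diff, so the following is only a minimal sketch of the kind of app object the command is assumed to load; FastAPI here is a guess based on the uvicorn/sqlmodel/pydantic dependencies, not something this diff confirms.

# Hypothetical sketch of main.py (not included in this diff) -- assumes a
# FastAPI app, since uvicorn.workers.UvicornWorker expects an ASGI application.
from fastapi import FastAPI

app = FastAPI()  # the "app" object that "gunicorn main:app" loads


@app.get("/health")
def health() -> dict:
    # Minimal liveness route for illustration; the project's real routes
    # would be defined elsewhere in the repository.
    return {"status": "ok"}

Run inside the container built from the Dockerfile above, gunicorn would serve such an app on port 3100 across (2 * CPU cores) + 1 uvicorn workers.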