Electronics Processes | Inputs & Outputs

In Re-Humanizing Sensing, the part that will most shape the project is whether I can connect and network devices across countries as I am planning. The first step is to set up my Raspberry Pi with Facial Emotion Recognition (FER), set up my ESP32 to receive the data and translate it, and, most importantly, connect the two through an online server.

Raspberry Pi: Facial Recognition

As this is the most difficult and time-consuming part of my project, I began by finding a pre-trained facial emotion recognition model and setting up the Raspberry Pi.

Raspberry Pi Pre-trained Model Used

In my case, I do not want to attach an emotion label to the expressions that are made; I would like the system to simply point out the expressions themselves. Instead of 'happiness' (as in the image on the right), I would rather say 'smiling'. The code below also assigns a color to each emotion, with an intensity that depends on its probability. I would translate that intensity from an emotion label into a percentage of the expression: '30% smiling'.
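
To make that translation concrete, here is a minimal sketch of the relabeling idea; the mapping from emotion labels to expression words is my own illustration, not part of the pre-trained model:

Python sketch: from emotion label to expression percentage
# Hypothetical mapping from FER2013 emotion labels to expression words
EXPRESSION_MAP = {
    'happy': 'smiling',
    'sad': 'frowning',
    'angry': 'scowling',
    'surprise': 'eyes widened',
}

def describe(emotion_label, probability):
    """Turn ('happy', 0.30) into '30% smiling'."""
    expression = EXPRESSION_MAP.get(emotion_label, emotion_label)
    return f"{round(probability * 100)}% {expression}"

print(describe('happy', 0.30))  # -> 30% smiling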

Python Code for FER
import cv2
import numpy as np
import dlib
from imutils import face_utils
import requests
from keras.models import load_model
from statistics import mode, StatisticsError
from utils.datasets import get_labels
from utils.inference import detect_faces, draw_text, draw_bounding_box, apply_offsets, load_detection_model
from utils.preprocessor import preprocess_input

# Function to convert color (RGB) to an integer
def color_to_int(color):
    return (color[0] << 16) + (color[1] << 8) + color[2]

# Function to send data to ThingSpeak
def send_to_thingspeak(color, intensity, write_key):
    color_int = color_to_int(color)  # Convert color to integer
    base_url = "https://api.thingspeak.com/update"
    data = {
        'api_key': write_key,
        'field1': color_int,
        'field2': intensity
    }
    try:
        response = requests.get(base_url, params=data)
        if response.status_code == 200:
            print("Data sent to ThingSpeak")
        else:
            print("Failed to send data to ThingSpeak")
    except Exception as e:
        print(f"Error: {e}")

# parameters for loading data and images
emotion_model_path = './models/emotion_model.hdf5'
emotion_labels = get_labels('fer2013')
frame_window = 10
emotion_offsets = (20, 40)

# loading models
detector = dlib.get_frontal_face_detector()
emotion_classifier = load_model(emotion_model_path)
emotion_target_size = emotion_classifier.input_shape[1:3]
emotion_window = []

# setting up webcam
cv2.namedWindow('window_frame')
cap = cv2.VideoCapture(0)

while cap.isOpened():
    ret, bgr_image = cap.read()
    if not ret:  # Stop if the camera returns no frame
        break
    gray_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2GRAY)
    rgb_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2RGB)
    faces = detector(rgb_image)

    for face_coordinates in faces:
        x1, x2, y1, y2 = apply_offsets(face_utils.rect_to_bb(face_coordinates), emotion_offsets)
        gray_face = gray_image[y1:y2, x1:x2]
        try:
            gray_face = cv2.resize(gray_face, emotion_target_size)
        except cv2.error:  # Face crop falls outside the frame; skip it
            continue

        gray_face = preprocess_input(gray_face, True)
        gray_face = np.expand_dims(gray_face, 0)
        gray_face = np.expand_dims(gray_face, -1)
        emotion_prediction = emotion_classifier.predict(gray_face)
        emotion_probability = np.max(emotion_prediction)
        emotion_label_arg = np.argmax(emotion_prediction)
        emotion_text = emotion_labels[emotion_label_arg]
        emotion_window.append(emotion_text)

        if len(emotion_window) > frame_window:
            emotion_window.pop(0)
        try:
            emotion_mode = mode(emotion_window)
        except StatisticsError:  # No unique mode in the window yet
            continue

        # Define color based on emotion
        color = np.array([0, 0, 0])  # Default color
        if emotion_text == 'angry':
            color = np.asarray((255, 0, 0))
        elif emotion_text == 'sad':
            color = np.asarray((0, 0, 255))
        elif emotion_text == 'happy':
            color = np.asarray((255, 255, 0))
        elif emotion_text == 'surprise':
            color = np.asarray((0, 255, 255))
        else:  # Other emotions
            color = np.asarray((0, 255, 0))

        # Adjust color intensity based on probability
        color = (color * emotion_probability).astype(int)
        color = color.tolist()

        # Send data to ThingSpeak
        send_to_thingspeak(color, emotion_probability, 'S2UGN2SQ8R2AZNEA')

        draw_bounding_box(face_utils.rect_to_bb(face_coordinates), rgb_image, color)
        draw_text(face_utils.rect_to_bb(face_coordinates), rgb_image, emotion_mode, color, 0, -45, 1, 1)

    bgr_image = cv2.cvtColor(rgb_image, cv2.COLOR_RGB2BGR)
    cv2.imshow('window_frame', bgr_image)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()

Server: ThingSpeak

To have the Raspberry Pi and the ESP32 talk to one another, I need an online server. An initial suggestion from my classmate Grecia Bello was to use ThingSpeak. Its main benefits are:

  • See instant visualizations of data posted by your devices or equipment
  • Execute MATLAB® code in ThingSpeak, perform online analysis and data processing with live data, and see automatic visualizations
  • Build IoT systems without setting up servers or developing web software

In ThingSpeak, I will create a channel that stores the data coming from the Raspberry Pi; the ESP32 will then retrieve that data from the same channel.
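
To sanity-check that the channel is receiving data, the last entry can be read back with the channel's Read API Key, using the same endpoint the ESP32 polls later on. A minimal sketch (the ID and key are placeholders):

Python sketch: reading the latest channel entry
import requests

# Placeholders: substitute your own channel ID and Read API Key
CHANNEL_ID = "YOUR_CHANNEL_ID"
READ_API_KEY = "YOUR_READ_API_KEY"

url = f"https://api.thingspeak.com/channels/{CHANNEL_ID}/feeds/last.json"
response = requests.get(url, params={"api_key": READ_API_KEY}, timeout=10)
# Expect something like {"created_at": "...", "field1": "...", "field2": "..."}
print(response.json())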

ChatGPT's General Set-Up Advice:

  1. Create a ThingSpeak Channel:
    • Log in to ThingSpeak and create a new channel.
    • Define fields that correspond to the different emotions your system will recognize (e.g., happiness, sadness, anger, etc.).
    • Note the Channel ID and Write API Key for the Raspberry Pi, and the Read API Key for the ESP32.
  2. Programming the Raspberry Pi:
    • Install the necessary libraries: ensure you have libraries for facial emotion recognition (like OpenCV, TensorFlow, or similar) and for HTTP requests (like requests in Python).
  3. Facial Emotion Recognition:
    • Implement the facial emotion recognition. This typically involves capturing images from a camera, processing the images to detect faces, and then classifying the emotions of the detected faces.
  4. Sending Data to ThingSpeak:
    • Once an emotion is recognized, format the data to match the ThingSpeak channel fields.
    • Use the requests library to send an HTTP POST request to ThingSpeak with the emotion data and your Write API Key (see the sketch below).
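
Following that advice, here is a minimal sketch of the POST request; the key and field values are placeholders (my actual script above uses a GET request, which ThingSpeak accepts as well):

Python sketch: posting emotion data to ThingSpeak
import requests

# Placeholder: substitute your channel's Write API Key
WRITE_API_KEY = "YOUR_WRITE_API_KEY"

payload = {
    "api_key": WRITE_API_KEY,
    "field1": 16744448,  # color packed as a single integer
    "field2": 0.87,      # emotion probability (intensity)
}
# ThingSpeak returns the new entry id as plain text, or "0" on failure
response = requests.post("https://api.thingspeak.com/update", data=payload)
print(response.text)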

ESP32: Light

To set up the ESP32 with the Arduino IDE, Grecia Bello has very good documentation to follow.

Note: at this stage, the Raspberry Pi and the ESP32 were still joined directly, without going through ThingSpeak.

Code for ESP32 to change RGB LED
#include <WiFi.h>
#include <HTTPClient.h>

const char* ssid = "YourSSID";
const char* password = "YourPassword";
const char* thingspeak_read_api_key = "M6NP3QNQUWXCWY9F";
const char* thingspeak_channel_id = "2406762";

const int redPin = 13;
const int greenPin = 12;
const int bluePin = 27;

void setup() {
  Serial.begin(115200);
  pinMode(redPin, OUTPUT);
  pinMode(greenPin, OUTPUT);
  pinMode(bluePin, OUTPUT);
  WiFi.begin(ssid, password);
  while (WiFi.status() != WL_CONNECTED) {
    delay(500);
    Serial.println("Connecting to WiFi...");
  }
  Serial.println("Connected to WiFi");
}

void loop() {
  if(WiFi.status() == WL_CONNECTED) {
    HTTPClient http;
    String serverPath = "http://api.thingspeak.com/channels/" + String(thingspeak_channel_id) + "/feeds/last.json?api_key=" + String(thingspeak_read_api_key);

    http.begin(serverPath);
    int httpResponseCode = http.GET();

    if(httpResponseCode == 200) {
      String payload = http.getString();

      // Parse payload to extract color integer and intensity
      int colorStartIndex = payload.indexOf("field1\":\"") + 9;
      int colorEndIndex = payload.indexOf("\",\"field2\"");
      int colorData = payload.substring(colorStartIndex, colorEndIndex).toInt();

      int redValue = (colorData >> 16) & 0xFF;   // Extract red component
      int greenValue = (colorData >> 8) & 0xFF;  // Extract green component
      int blueValue = colorData & 0xFF;          // Extract blue component

      int intensityStartIndex = payload.indexOf("field2\":\"") + 9;
      int intensityEndIndex = payload.indexOf("\"}", intensityStartIndex);
      float intensity = payload.substring(intensityStartIndex, intensityEndIndex).toFloat();
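
      // NOTE (assumption): analogWrite() is only available on Arduino-ESP32
      // core 2.0.0 and later; on older cores use ledcAttachPin()/ledcWrite().
      // Also, the Pi already scaled the color by the emotion probability
      // before sending, so multiplying by intensity below scales it twice.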

      // Set the RGB LED color with intensity adjustment
      analogWrite(redPin, redValue * intensity);
      analogWrite(greenPin, greenValue * intensity);
      analogWrite(bluePin, blueValue * intensity);
    } else {
      Serial.println("Error on HTTP request");
    }

    http.end();
  }
  delay(10000); // Wait for 10 seconds before updating the color
}
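
The color travels between the two boards as a single integer: the Python side packs (R, G, B) into 24 bits with color_to_int(), and the ESP32 unpacks it with shifts and masks. A quick round-trip check of that arithmetic:

Python check: packing and unpacking the color integer
def color_to_int(color):
    return (color[0] << 16) + (color[1] << 8) + color[2]

packed = color_to_int((255, 128, 0))  # -> 16744448 (0xFF8000)
red = (packed >> 16) & 0xFF           # -> 255
green = (packed >> 8) & 0xFF          # -> 128
blue = packed & 0xFF                  # -> 0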

Bill of Materials

Item                    Quantity              Price
ESP32 microcontroller   1                     $20
WS2813B LED strip       8 meters (960 LEDs)   $84
12V air pump            2                     $20
Relays                  2                     $8

Translations

This project intentionally engages the senses of touch, hearing, and sight to translate the subjective, personal experience of emotions into a physiological interaction, provoking specific responses within the output community. To inform the approach, I designed and distributed a form asking participants which colors they associate with each emotion. Based on this feedback, each emotion was studied, assigned a color, and given a specific duration for inflating the biobots. This methodical approach, guided by community input, ensures that viewers in the output community receive a multi-sensory interpretation of emotions.

Case 1:

Emotion: Anger

Anger is commonly characterized by feelings of annoyance, displeasure, hostility, and agitation. Physiologically, it triggers a rise in heart rate and blood pressure, heavier breathing, an increase in body temperature, perspiration, facial flushing, trembling, chest tightness, and muscle tension. These responses are culturally encapsulated in phrases like “red with anger” or “seeing red.”

In the performance, this is represented by LEDs that turn a bright, intense red. Both pumps operate for a full minute, causing viewers to feel discomfort and agitation. The sound is loud for an extended period, everything is bathed in a red hue, and the visibly strained biobots, which look as if they might explode at any second, contribute to a sense of imminent danger, effectively mirroring the explosive potential of anger.

Case 2:

Emotion: Sadness

Sadness is typically associated with feelings of sorrow, grief, and a sense of loss. Physiologically, it can manifest as a slow heart rate, shallow breathing, a drop in energy and muscle tone, and an overall sensation of lethargy. People may experience tearing, a lump in the throat, and a downturn in posture. These elements encapsulate the heaviness that sadness imposes on the body. Common expressions that reflect sadness in various cultures include “feeling blue” or “down in the dumps.”

In the performance, the LEDs emit a soft blue light to represent sadness, with a subdued intensity that mirrors the emotion’s quiet nature. The pumps operate slowly and intermittently, creating a gentle inflation and deflation of the biobots that resembles a sighing motion. This setup aims to evoke a reflective and somber mood among viewers, reinforcing the inward and subdued nature of sadness.

Case 3:

Emotion: Fear

Fear is typically characterized by feelings of threat, anxiety, and anticipation of potential danger. Physiologically, it triggers an increased heart rate, rapid breathing, and heightened senses, which prepare the body for a fight-or-flight response. Other common symptoms include dilated pupils, cold sweats, and an adrenaline surge, which can lead to shaking. Phrases like “frozen with fear” or “scared stiff” capture the immobilizing aspect of this emotion.

In the performance, the LEDs are set to emit a dim grey light, symbolizing the uncertain and shadowy nature of fear. The pumps activate erratically, causing the biobots to inflate and deflate unpredictably. This irregular motion, combined with the unsettling grey ambiance, is designed to unsettle the viewers, mimicking the disorienting and chilling experience of fear.

Case 4:

Emotion: Disgust

Disgust is an emotion characterized by revulsion and a strong desire to avoid or expel something offensive. Physiologically, it may manifest as a wrinkled nose, a constricted throat, and a sensation of nausea, which can lead to gagging or retching. The body’s reaction aims to protect from ingesting harmful substances. Common expressions that convey disgust include “turning one’s stomach” or “makes my skin crawl.”

In the performance, the LEDs display a stark green light, chosen for its association with sickness and toxicity, to represent disgust. The pumps operate in a quick, forceful manner, causing the biobots to expand and contract abruptly. This sudden movement, combined with the vivid green light, aims to provoke a visceral reaction in viewers, encapsulating the essence of aversion and repulsion typical of disgust.

Case 5:

Emotion: Neutral

Neutrality is characterized by a state of calmness, balance, and lack of strong emotional expressions. Physiologically, it is associated with normal or baseline levels of heart rate, breathing, and muscle tension, reflecting a state of relaxation and alertness without agitation. Expressions such as “even-keeled” or “maintaining composure” often describe this balanced emotional state.

In the performance, the LEDs emit a clear, steady white light, symbolizing clarity and the absence of emotional disturbance. The pumps do not activate, allowing the biobots to remain static. This absence of movement emphasizes the stability and calm inherent in a neutral state, offering viewers a moment of peace and balance amid the more dynamic representations of other emotions.

Case 6:

Emotion: Surprise

Surprise is characterized by a sudden and often unexpected emotional response, typically involving heightened alertness and a quick physiological reaction. Physiologically, it manifests as a brief spike in heart rate, widened eyes, and an involuntary gasp, preparing the body for rapid adaptation to new information. Common phrases that capture this emotion include “taken aback” or “caught off guard.”

In the performance, the LEDs shine in a vibrant teal color, chosen for its vivid and unexpected hue, to represent the element of surprise. The pumps activate suddenly, causing the biobots to inflate quickly and then hold their form momentarily. This abrupt and dramatic change, combined with the striking teal light, is designed to mimic the startling and captivating nature of surprise, engaging viewers in a moment of awe and wonder.

Case 7:

Emotion: Happy

Happiness is characterized by feelings of joy, contentment, and satisfaction. Physiologically, it leads to an increase in dopamine and serotonin levels, producing smiles, laughter, and a general sense of well-being. Physiological signs also include relaxed muscles and an overall energized yet comfortable state. Expressions like “beaming with joy” or “on cloud nine” are often used to describe this uplifting emotion.

In the performance, the LEDs emit a bright and cheerful yellow light, symbolizing the warmth and positivity associated with happiness. The pumps gently and rhythmically inflate the biobots, creating a playful and buoyant movement. This lively and inviting atmosphere is designed to evoke the uplifting and light-hearted essence of happiness, encouraging viewers to feel a sense of joy and elation.
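
Pulling the seven cases together, the whole mapping fits in one lookup. A sketch of it in Python; the RGB tuples for grey and teal are my own reading of the descriptions above, and the pump notes paraphrase them (the exact timings live in the final code below):

Python sketch: emotion-to-output summary
EMOTION_OUTPUTS = {
    'anger':    ((255, 0, 0),     'both pumps, sustained'),
    'sadness':  ((0, 0, 255),     'slow, intermittent inflation'),
    'fear':     ((128, 128, 128), 'erratic inflate/deflate'),
    'disgust':  ((0, 255, 0),     'quick, forceful bursts'),
    'neutral':  ((255, 255, 255), 'pumps off, biobots static'),
    'surprise': ((0, 128, 128),   'sudden inflate, brief hold'),
    'happy':    ((255, 255, 0),   'gentle, rhythmic inflation'),
}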

Final Code

#include <WiFi.h>
#include <HTTPClient.h>
#include <Adafruit_NeoPixel.h>

const char* ssid = "Iaac-Wifi P100";
const char* password = "EnterIaac22@";
const char* thingspeak_read_api_key = "M6NP3QNQUWXCWY9F";
const char* thingspeak_channel_id = "2406762";

#define LED_PIN 27
#define NUM_LEDS 400
Adafruit_NeoPixel strip(NUM_LEDS, LED_PIN, NEO_GRB + NEO_KHZ800);

int currentRed = 0, currentGreen = 0, currentBlue = 0;
const int relayPin1 = 33;  // Pump 1
const int relayPin2 = 13;  // Pump 2

unsigned long previousMillis = 0;
const long interval = 5000;

unsigned long lastPumpMillis1 = 0;
unsigned long lastPumpMillis2 = 0;
bool pumpState1 = false;
bool pumpState2 = false;
float lastIntensity = -1;
long colorData = 0;
float intensity = 0;
bool transitioning = false;
int emotionNumber = 0;

void setup() {
  Serial.begin(115200);
  WiFi.begin(ssid, password);
  while (WiFi.status() != WL_CONNECTED) {
    delay(500);
    Serial.println("Connecting to WiFi...");
  }
  Serial.println("WiFi Connected Successfully");
  Serial.print("IP Address: ");
  Serial.println(WiFi.localIP());

  strip.begin();
  strip.show();
  pinMode(relayPin1, OUTPUT);
  pinMode(relayPin2, OUTPUT);
  digitalWrite(relayPin1, LOW);
  digitalWrite(relayPin2, LOW);
}

void transitionToColor() {
  static unsigned long lastTransitionMillis = 0;
  static int step = 0;
  const int transitionSteps = 50;
  const int stepDelay = 10;

  if (!transitioning) return;

  unsigned long currentMillis = millis();
  if (currentMillis - lastTransitionMillis > stepDelay) {
    float progress = (float)step / (float)transitionSteps;
    int redValue = currentRed + (int)((colorData >> 16 & 0xFF) * intensity - currentRed) * progress;
    int greenValue = currentGreen + (int)((colorData >> 8 & 0xFF) * intensity - currentGreen) * progress;
    int blueValue = currentBlue + (int)((colorData & 0xFF) * intensity - currentBlue) * progress;

    for (int i = 0; i < NUM_LEDS; i++) {
      strip.setPixelColor(i, strip.Color(redValue, greenValue, blueValue));
    }
    strip.show();

    lastTransitionMillis = millis();
    step++;

    if (step > transitionSteps) {
      transitioning = false;
      step = 0;
      currentRed = redValue;
      currentGreen = greenValue;
      currentBlue = blueValue;
    }
  }
}

void pumpControl() {
  unsigned long currentMillis = millis();
  Serial.println("PUMP CONTROL");

  // If intensity is 0.3 or below, keep the pumps off
  if (intensity <= 0.3) {
    digitalWrite(relayPin1, LOW);
    digitalWrite(relayPin2, LOW);
    Serial.println("LOW INTENSITY PUMP OFF");
    return;
  }

  // Turn on pumps based on detected emotion
  if (pumpState1 && (currentMillis - lastPumpMillis1 > 5000)) {  // Pump on duration
    digitalWrite(relayPin1, LOW);                                     // Turn pump 1 off
    pumpState1 = false;
    Serial.println("Pump 1 turned off");
    return;
  } 
  if (pumpState2 && (currentMillis - lastPumpMillis2 > 10000)) {  // Pump on duration
    digitalWrite(relayPin2, LOW);                                       // Turn pump 2 off
    pumpState2 = false;
    Serial.println("Pump 2 turned off");
    return;
  } 
  if (!pumpState1 && !pumpState2) {
    // Check detected emotion and turn on respective pumps
    Serial.println("pumping");
    Serial.println(emotionNumber);
    if (intensity > 0.3) {
      if (emotionNumber == 1) {         // Angry
        digitalWrite(relayPin1, HIGH);  // Turn pump 1 on
        digitalWrite(relayPin2, HIGH);  // Turn pump 2 on
        pumpState1 = true;
        pumpState2 = true;
        lastPumpMillis1 = currentMillis;
        lastPumpMillis2 = currentMillis;
        Serial.println("Pump 1 and 2 turned on (Angry)");
      } else if (emotionNumber == 2) {  // Sad
        digitalWrite(relayPin2, HIGH);  // Turn pump 2 on
        pumpState2 = true;
        lastPumpMillis2 = currentMillis;
        Serial.println("Pump 2 turned on (Sad)");
      } else if (emotionNumber == 3) {  // Happy
        digitalWrite(relayPin1, HIGH);  // Turn pump 1 on
        pumpState1 = true;
        lastPumpMillis1 = currentMillis;
        Serial.println("Pump 1 turned on (Happy)");
      } else if (emotionNumber == 4) {  // Surprise
        digitalWrite(relayPin1, HIGH);  // Turn pump 1 on
        pumpState1 = true;
        lastPumpMillis1 = currentMillis;
        Serial.println("Pump 1 turned on (Surprise)");
      } else if (emotionNumber == 5) {  // Disgust
        digitalWrite(relayPin2, HIGH);  // Turn pump 2 on
        pumpState2 = true;
        lastPumpMillis2 = currentMillis;
        Serial.println("Pump 2 turned on (Disgust)");
      } else if (emotionNumber == 6) {  // Fear
        digitalWrite(relayPin1, HIGH);  // Turn pump 1 on
        digitalWrite(relayPin2, HIGH);  // Turn pump 2 on
        pumpState1 = true;
        pumpState2 = true;
        lastPumpMillis1 = currentMillis;
        lastPumpMillis2 = currentMillis;
        Serial.println("Pump 1 and 2 turned on (Fear)");
      }
    }
  } else {
    Serial.println("DOING NOTHING");
  }
}

void fetchData() {
  unsigned long currentMillis = millis();
  if (currentMillis - previousMillis > interval) {
    previousMillis = currentMillis;  // Reset the fetch timer even if nothing changed,
                                     // so ThingSpeak is polled at most once per interval
    HTTPClient http;
    String serverPath = "http://api.thingspeak.com/channels/" + String(thingspeak_channel_id) + "/feeds/last.json?api_key=" + String(thingspeak_read_api_key);
    http.begin(serverPath);
    int httpResponseCode = http.GET();

    if (httpResponseCode == 200) {
      String payload = http.getString();
      Serial.println("Received data from ThingSpeak:");
      Serial.println(payload);

      // field1 is the color packed as a decimal integer by the Raspberry Pi
      // (see color_to_int above), so parse it in base 10
      long newColorData = strtol(payload.substring(payload.indexOf("field1\":\"") + 9, payload.indexOf("\",\"field2\"")).c_str(), NULL, 10);
      // field2 now ends where field3 begins
      float newIntensity = payload.substring(payload.indexOf("field2\":\"") + 9, payload.indexOf("\",\"field3\"")).toFloat();
      // field3 (the emotion number) is assumed to be added by the Pi script
      int field3Start = payload.indexOf("field3\":\"") + 9;
      int newEmotionNumber = payload.substring(field3Start, payload.indexOf("\"}", field3Start)).toInt();
      Serial.println(newEmotionNumber);

      if (newIntensity == 0) {  // No faces detected, fade the light out
        colorData = 0;
        intensity = 0;
        transitioning = true;
      } else if (newColorData != colorData || newIntensity != intensity || newEmotionNumber != emotionNumber) {
        colorData = newColorData;
        intensity = newIntensity;
        emotionNumber = newEmotionNumber;
        transitioning = true;  // Start a new transition
      }
    } else {
      Serial.print("Error on HTTP request, code: ");
      Serial.println(httpResponseCode);
    }
    http.end();
  }
}

void loop() {
  fetchData();
  transitionToColor();
  pumpControl();
}