My ‘snake’ tracks a human face using a little AI programming (OpenCV face detection).

The ‘snake’ is driven directly from a computer running a Python program.

Head movement comes from two servo motors in a pan/tilt arrangement, driven by a Pololu Maestro servo controller.
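As a minimal sketch of that control path (assuming the same maestro Python library, COM port and channel numbers used in the full program below), centring the two servos looks like this:

import maestro

# Maestro on COM7; pan servo on channel 0, tilt servo on channel 1 (as in the full program).
# setTarget() takes the position in the Maestro's quarter-microsecond units.
servo = maestro.Controller('COM7')
servo.setTarget(0, 7000)   # pan to centre
servo.setTarget(1, 7000)   # tilt to centre
servo.close()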

Below is the Python code I used for this project.

import cv2         # OpenCV: camera capture, drawing and face detection
import datetime
import maestro     # Pololu Maestro servo controller interface

pan = 7000   # Initial position of servo, centre (10000 is max turn)
tilt = 7000  # Initial position of servo, centre

win = 10      # Dead band in pixels: ignore offsets smaller than this

x_mid = 200   # Target position in the frame for the detected face box (x)
y_mid = 160   # Target position in the frame for the detected face box (y)

# Centre both servos at start-up
servo = maestro.Controller('COM7')   # Maestro on serial port COM7
servo.setTarget(0, pan)              # channel 0 = pan servo
servo.setTarget(1, tilt)             # channel 1 = tilt servo
servo.close()

faceCascade = cv2.CascadeClassifier(r'C:\OpenCV ver 2\opencv\sources\data\haarcascades\haarcascade_frontalface_default.xml')   # raw string so the backslashes are not treated as escapes
font = cv2.FONT_HERSHEY_PLAIN

counter = 0     # Frame counter (wraps at 1024)
move = 0        # Offset used to animate the two dots along the bottom of the frame
length = 400    # Length of the vertical 'meter' line on the right-hand edge

video_capture = cv2.VideoCapture(0)    # Open the default webcam

while True:
    ret, frame = video_capture.read()           # Capture frame-by-frame
    counter = counter + 1
    move = move + 10
    if counter > 1024:
        counter = 0
    if move > 150:
        move = 0
    cv2.putText(frame, "Phil's robotic snake", (10, 25), font, 1.0, (0, 255, 0), 1, 2)
    timenow = datetime.datetime.now().strftime("%a, %d %B %Y %H:%M:%S")
    cv2.putText(frame, timenow, (10, 45), font, 1.0, (0, 255, 0), 1, 2)
    cv2.putText(frame, 'Tracking', (580, 10), font, 1.0, (0, 255, 0), 1, 2)
    # Animated dots along the bottom of the frame and the vertical 'meter' line on the right-hand edge
    cv2.circle(frame, (400 + move, 470), 2, (0, 255, 0), thickness=-1, lineType=8, shift=0)
    cv2.circle(frame, (550 - move, 470), 2, (0, 255, 0), thickness=-1, lineType=8, shift=0)
    cv2.line(frame, (630, 10), (630, 10 + length), (0, 255, 0), thickness=1, lineType=8, shift=0)


    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)   # the Haar cascade works on greyscale images

    # Look for faces in the greyscale frame
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor=1.1,
        minNeighbors=7,
        minSize=(30, 30),
        flags=cv2.cv.CV_HAAR_SCALE_IMAGE    # OpenCV 2.x constant; use cv2.CASCADE_SCALE_IMAGE on OpenCV 3+
    )

    # For each detected face, draw a crosshair overlay, then steer the servos towards it
    # (// is used for the divisions so the coordinates stay integers under Python 2 and 3)
    for (x, y, w, h) in faces:
        cv2.putText(frame, 'pan = ' + str(pan) + '   tilt = ' + str(tilt), (10, 475), font, 0.7, (0, 255, 0), 1, 2)
        cv2.putText(frame, 'x = ' + str(x - x_mid) + '   y = ' + str(y - y_mid), (10, 455), font, 0.7, (0, 255, 0), 1, 2)
        cv2.circle(frame, (x + w // 2, y + h // 2), 10, (0, 255, 0), thickness=1, lineType=8, shift=0)
        cv2.circle(frame, (x + w // 2, y + h // 2), 30, (0, 255, 0), thickness=1, lineType=8, shift=0)
        cv2.line(frame, (x + w // 2, y + h // 2 - 100), (x + w // 2, y + h // 2 + 100), (0, 255, 0), thickness=1, lineType=8, shift=0)
        cv2.line(frame, (x + w // 2 - 100, y + h // 2), (x + w // 2 + 100, y + h // 2), (0, 255, 0), thickness=1, lineType=8, shift=0)
        meter = x    # the face's x position sets the arrow height on the meter line
        cv2.arrowedLine(frame, (630, 10 + meter), (615, 10 + meter), (0, 255, 0), thickness=2, shift=0, tipLength=0.2)
        cv2.putText(frame, 'Pheidi detected', (x + w // 2 + 10, y), font, 1.0, (0, 0, 255), 1, 2)

        # Re-open the Maestro and nudge the servo targets towards the face; 'win'
        # gives a +/-10 pixel dead band so the servos do not twitch when the face
        # is already close to the set-point
        servo = maestro.Controller('COM7')
        if (x - x_mid) > win:
            pan = pan + ((x - x_mid) // 2)      # face is right of the set-point
        if (x - x_mid) < -win:
            pan = pan - ((x_mid - x) // 2)      # face is left of the set-point
        if (y - y_mid) > win:
            tilt = tilt - ((y - y_mid) // 3)    # face is below the set-point
        if (y - y_mid) < -win:
            tilt = tilt + ((y_mid - y) // 3)    # face is above the set-point

        # Clamp the targets to the limits used for these servos
        if pan > 10000:
            pan = 10000
        if pan < 10:
            pan = 10
        if tilt > 10000:
            tilt = 10000
        if tilt < 10:
            tilt = 10
        servo.setTarget(0, pan)
        servo.setTarget(1, tilt)
        servo.close()

    cv2.namedWindow('Video', cv2.WINDOW_NORMAL)   # resizable display window
    cv2.imshow('Video', frame)                    # display the annotated frame
    if cv2.waitKey(1) & 0xFF == ord('q'):         # press 'q' to quit
        break
# Release the capture
video_capture.release()
cv2.destroyAllWindows()
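
The tracking itself is just a proportional nudge: the pixel offset of the detected face from the set-point (x_mid, y_mid) is divided down and added to (or subtracted from) the current pan/tilt targets, and the ±10 pixel dead band (win) stops the servos twitching when the face is already close to the set-point. For example, a face 60 pixels to the right of x_mid pushes pan up by 30 counts, and a face 30 pixels above y_mid pushes tilt up by 10 counts. The same idea pulled out into a standalone helper (a sketch only; nudge() is not part of the program above, but the gain, dead band and limit values mirror it):

def nudge(target, error, gain, invert=False, dead_band=10, lo=10, hi=10000):
    # Proportional step: move the servo target by a fraction (1/gain) of the
    # pixel error, ignore offsets inside the dead band, and clamp the result
    # to the limits used for these servos.
    if abs(error) > dead_band:
        step = error // gain
        target += -step if invert else step
    return max(lo, min(hi, target))

# Worked example: face 60 px right of the set-point and 30 px above it
pan = nudge(7000, 60, 2)                  # 7000 + 60 // 2    -> 7030
tilt = nudge(7000, -30, 3, invert=True)   # 7000 - (-30 // 3) -> 7010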