Project:Shitcam

In the spirit of the intrepid hackers who have adorned our 'space with webcams in almost every corner, here is my humble contribution.

How It Works

The shitcam has been placed in a strategic, discreet location in the hackspace to capture people as they sit down to use the facilities. Instead of sending out a video stream, the raw frames are continuously processed by a Processing sketch that uses OpenCV to implement face detection. When a face is detected, a snapshot is uploaded to twitpic using the twitpic4p library.
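
In outline, the detection side is just the standard OpenCV-for-Processing loop; here is a minimal sketch of that part only (the library calls are the same ones used in the full code attached at the bottom of this page, which adds the cropping, rate limiting and twitpic upload):

// Minimal outline of the detection loop; see the full sketch below for the rest.
import hypermedia.video.*;
import java.awt.Rectangle;

OpenCV opencv;

void setup() {
  size(320, 240);
  opencv = new OpenCV(this);
  opencv.capture(width, height);                      // open the webcam
  opencv.cascade(OpenCV.CASCADE_FRONTALFACE_ALT2);    // load the frontal-face classifier
}

void draw() {
  opencv.read();                                      // grab a frame
  Rectangle[] faces = opencv.detect(1.2, 2, OpenCV.HAAR_DO_CANNY_PRUNING, 40, 40);
  image(opencv.image(), 0, 0);
  if (faces.length > 0) {
    // in the real sketch this is where the snapshot is cropped and twittered
    println(faces.length + " face(s) in frame");
  }
}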

Before being twittered, the image is cropped to the approximate area of the detected face. This is important in order to respect people's privacy, and to avoid accidental prickpics, twatpics, etc. In testing I've found that extraneous body parts have only appeared when they actually look like faces.
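
Concretely, the crop amounts to growing the detected rectangle by a margin and clamping it to the frame. A minimal sketch of that step (cropArea() is an illustrative helper, not part of the attached code, which builds the rectangle inline in draw()):

// Illustrative helper: expand a detected face rectangle by `margin` pixels
// on each side and clamp it to the frame boundaries.
import java.awt.Rectangle;

Rectangle cropArea(Rectangle face, int margin, int frameW, int frameH) {
  int x = max(face.x - margin, 0);
  int y = max(face.y - margin, 0);
  int w = min(face.width  + margin * 2, frameW - x);
  int h = min(face.height + margin * 2, frameH - y);
  return new Rectangle(x, y, w, h);
}

// Usage, per detected face:
//   Rectangle c = cropArea(faces[i], margin, width, height);
//   PImage snap = get(c.x, c.y, c.width, c.height);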

Access

To see the latest images, search for london hackspace #shitface or go straight here.

Future Improvements

The picture quality is not great. Perhaps there could be a way to trigger a DSLR when a face is detected, but focus might be a problem. I would also like to set up an RFID skimmer so that if a member has their access card in their pocket, their image will be added directly to their wiki profile and the twitpic tagged with their name. I have a working prototype but need to find a way to mount the reader and run the cables. I would also appreciate any advice on improving the range - around 10 cm is all I have, at most.

Code

The code is published under the GNU GPL and is attached below; it's pretty straightforward:

import twitterpic.*;                    // twitpic4p - uploads images to twitpic
import hypermedia.video.*;              // OpenCV library for Processing
import java.awt.Rectangle;
import java.io.ByteArrayOutputStream;   // used by getBytes() below
import javax.imageio.*;
import javax.imageio.stream.*;
import java.awt.image.BufferedImage;

OpenCV opencv;
ImageToTwitter twitterer;

PrintWriter output;
PFont fontp;
int capWidth  = 800;
int capHeight = 600;
int contrast_value    = 127;
int brightness_value  = 127;
boolean drawArea      = true;     // draw rectangles around detected faces
boolean transmit      = false;    // actually post to twitpic (false = dry run)
int camera            = 0;        // camera index
int margin            = 30;       // extra pixels around the detected face when cropping
String twiturl;
String twittag;
String twituser;
String twitpass;
int mode = 0;
long lastTwit;                    // time of the last tweet, in milliseconds
long timeout          = 60000;    // minimum gap between tweets, in milliseconds
int recentMatches;                // consecutive frames containing a face
int minimumMatches    = 10;       // consecutive matches required before tweeting

void setup() {
    ///////////////
    // shitcam.cfg: labels on the even lines, values on the odd lines below them
    String[] lines = loadStrings("shitcam.cfg");
    println("Config settings: " + lines.length + " lines");
    for (int i=0; i < lines.length; i++) {
        println(lines[i]);
    }
    twiturl           = lines[1];          println("twiturl=" + lines[1]);
    twittag           = lines[3];          println("twittag=" + lines[3]);
    twituser          = lines[5];          println("twituser=" + lines[5]);
    twitpass          = lines[7];          println("twitpass=" + lines[7]);
    transmit          = boolean(lines[9]); println("transmit=" + lines[9]);
    contrast_value    = int(lines[11]) - 127;   // 127 in the config means no change
    println("contrast=" + contrast_value);
    brightness_value  = int(lines[13]) - 127;   // 127 in the config means no change
    println("brightness=" + brightness_value);
    drawArea          = boolean(lines[15]); println("drawArea=" + lines[15]);
    camera            = int(lines[17]);     println("camera=" + lines[17]);
    margin            = int(lines[19]);     println("margin=" + lines[19]);
    timeout           = int(lines[21]);     println("timeout=" + lines[21]);
    minimumMatches    = int(lines[23]);     println("minimumMatches=" + lines[23]);
    capWidth          = int(lines[25]);     println("capWidth=" + lines[25]);
    capHeight         = int(lines[27]);     println("capHeight=" + lines[27]);
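    // For reference, an assumed shitcam.cfg layout (the labels are illustrative
    // guesses; only the odd-numbered lines are actually read, in the order of
    // the assignments above):
    //
    //   twiturl
    //   <twitpic API URL>
    //   twittag
    //   #shitface
    //   twituser
    //   <twitter username>
    //   twitpass
    //   <twitter password>
    //   transmit
    //   false
    //   contrast
    //   127
    //   brightness
    //   127
    //   drawArea
    //   true
    //   camera
    //   0
    //   margin
    //   30
    //   timeout
    //   60000
    //   minimumMatches
    //   10
    //   capWidth
    //   800
    //   capHeight
    //   600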
///////////////
    //size( 320, 240 );
    size( capWidth, capHeight );
    fontp = loadFont("CourierNew36.vlw");
    opencv = new OpenCV( this );
    opencv.capture( width, height, camera );                   // open video stream
    opencv.cascade( OpenCV.CASCADE_FRONTALFACE_ALT2 );  // load the frontal-face detection cascade (haarcascade_frontalface_alt2.xml)
    twitterer = new ImageToTwitter(this);
}

public void stop() {
    opencv.stop();
    super.stop();
}

void draw() {
    // grab a new frame and apply the configured contrast/brightness
    opencv.read();
//    opencv.convert( GRAY );
    opencv.contrast( contrast_value );
    opencv.brightness( brightness_value );

    // run the Haar-cascade face detection and count consecutive frames with a face
    Rectangle[] faces = opencv.detect( 1.2, 2, OpenCV.HAAR_DO_CANNY_PRUNING, 40, 40 );
    if(faces.length == 0)
      recentMatches = 0;
    else
      ++recentMatches;

    // display the image
    image( opencv.image(), 0, 0 );
    ///////////////////////////////////
    textFont(fontp, 12);
    fill(color(200,0,200));
    // draw face area(s): inner rectangle = detected face, outer = face plus margin
    if (drawArea) {
      noFill();
      stroke(255,0,0);
      for( int i=0; i<faces.length; i++ ) {
        rect( faces[i].x, faces[i].y, faces[i].width, faces[i].height );
        stroke(125,0,0);
        rect( faces[i].x-margin, faces[i].y-margin, faces[i].width+margin*2, faces[i].height+margin*2 );
      }
    }
    
    // crop to the detected face plus margin (clamped to the frame) and hand it on
    for( int i=0; i<faces.length; i++ ) {
      Rectangle face = new Rectangle(max(faces[i].x-margin, 0),
                                     max(faces[i].y-margin, 0),
                                     min(faces[i].width+margin*2, width-faces[i].x+margin-1),
                                     min(faces[i].height+margin*2, height-faces[i].y+margin-1));
      PImage snap = get(face.x, face.y, face.width, face.height);
      handleFace(snap);
    }
}

void handleFace(PImage snap){
  // only tweet if the rate limit has expired and the face has been seen in
  // enough consecutive frames (cuts down on false positives)
  if(System.currentTimeMillis() > lastTwit+timeout
     && recentMatches >= minimumMatches){
    twit(snap);
  }
}

void twit(PImage img){
  text("twitting", 20, 20);
  img.loadPixels();
  twitterer.setType(twitterer.PNG);
  if (transmit) {
    twitterer.post(twiturl, "shitface", twituser, twitpass, twittag, true, getBytes(img));
    text("twitted", 20, 40);
  } else {
    text("internet off - no tweet", 20, 60);
  }
  lastTwit = System.currentTimeMillis();
}

byte[] getBytes(PImage img){
  // encode the PImage as a PNG byte array for the twitpic upload
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try{
    ImageIO.write((BufferedImage)img.getImage(), "png", baos);
  }catch(Exception e){
    e.printStackTrace();
    return new byte[0]; // encoding problem - send nothing
  }
  return baos.toByteArray();
}

void save(PImage img){
  text(("saving"),20,20);
  img.save("snapshot.png");
  text(("saved"),20,40);
}