Good news - we're open for limited services in Wembley. Ujima House is now actively under refurbishment and we'd love your help in making the space the best it can be.

Please pay attention to the main LHS mailing list or pop into our #london-hack-space IRC channel and say hello.

During this interim period donations and continued membership are greatly appreciated while we transition to our new space.


From London Hackspace Wiki
Jump to: navigation, search

In the spirit of the intrepid hackers who have adorned our 'space with webcams in almost every corner, here is my humble contribution -

How It Works

The shitcam has been placed in a strategic, discreet location in the hackspace to capture people when sitting down to use the facilities. Instead of sending out a video stream, the raw data is continuously processed by a Processing sketch which uses OpenCV to implement face detection. When a face is detected a snapshot is uploaded to twitpic, using the twitpic4p library.

Before being twittered, the image is cropped to the approximate area of the detected face. This is important in order to respect people's privacy, and to avoid accidental prickpics, twatpics, etc. Under testing I've found that extraneous body parts have only appeared when they actually look like faces.


To see the latest images look for london hackspace #shitface or go straight here.

Future Improvements

The picture quality is not great. Perhaps there could be a way to trigger a DSLR when a face is detected, but focus might be a problem. I would also like to set up an RFID skimmer so that if members have their access cards in their pockets, their image will be directly added to their wiki profile, and the twitpic tagged with their name. I have a working prototype but need to find a way to mount the reader and run the cables. Also I would appreciate any advice on improving the range - around 10cm is all I have, at most.


The code is published under the GNU GPL and is attached; it's pretty straightforward:

import twitterpic.*;
import java.awt.Rectangle;
import javax.imageio.*;
import java.awt.image.BufferedImage;
// --- Sketch-level state for the shitcam (Processing/OpenCV face-cam) ---
OpenCV opencv;            // OpenCV wrapper: camera capture + Haar face detection
ImageToTwitter twitterer; // twitpic4p uploader (constructed in setup)

PrintWriter output;       // NOTE(review): declared but never assigned/used in the visible code
PFont fontp;              // font for the on-screen status text, loaded in setup
int capWidth =800;        // capture width; overwritten from shitcam.cfg in setup
int capHeight=600;        // capture height; overwritten from shitcam.cfg in setup
int contrast_value    = 127;  // contrast applied to each frame (cfg-overridable)
int brightness_value  = 127;  // brightness applied to each frame (cfg-overridable)
boolean drawArea      = true;  // whether to outline detected faces on screen (cfg-overridable)
boolean transmit      = false; // false = dry run: no upload, show "internet off" text instead
int camera            = 0;     // camera index passed to opencv.capture (cfg-overridable)
int margin            = 30;    // padding (px) added around a detected face before cropping
String twiturl;           // read from cfg; presumably the twitpic endpoint -- can't confirm from here
String twittag;           // hashtag appended to uploads (e.g. #shitface)
String twituser;          // twitpic/twitter account name from cfg
String twitpass;          // account password from cfg (stored in plain text)
int mode = 0;             // NOTE(review): never referenced in the visible code
long lastTwit;            // millis() timestamp of the most recent upload (rate limiting)
long timeout          = 60000;  // minimum ms between uploads (cfg-overridable)
int recentMatches;        // consecutive frames with at least one face; reset to 0 on a faceless frame
int minimumMatches    = 10;     // frames of sustained detection required before an upload fires

// Processing entry point: loads shitcam.cfg, sizes the sketch window to the
// capture resolution, and initialises the camera, face cascade and uploader.
// Config format (inferred from the indices below): values live on odd lines
// (0-based), with label lines between them -- TODO confirm against a real cfg.
// NOTE(review): this block appears scrape-damaged -- the for-loop below is
// never closed in the visible text, and setup()'s closing brace is missing.
void setup() {
String[] lines = loadStrings("shitcam.cfg");
println("Config settings: " + lines.length + " lines");
// NOTE(review): the loop body reads fixed indices (1,3,5,...) so every
// iteration repeats identical work; the loop almost certainly should not
// exist (or its braces were lost in the scrape) -- verify against upstream.
for (int i=0; i < lines.length; i++) {
twiturl        = lines[1]; println("twiturl=" + lines[1]);
twittag        = lines[3]; println("twittag=" + lines[3]);
twituser       = lines[5]; println("twituser=" + lines[5]);
twitpass       = lines[7]; println("twitpass=" + lines[7]);  // NOTE(review): logs the password in clear text
transmit       = boolean(lines[9]); println("transmit=" + lines[9]);
contrast_value    = int(lines[11]);
println("contrast=" + contrast_value);
brightness_value  = int(lines[13]);
println("brightness=" + brightness_value );
drawArea          = boolean(lines[15]); println("drawArea=" + lines[15]);
camera            = int(lines[17]); println("camera=" + lines[17]);
margin            = int(lines[19]); println("margin=" + lines[19]);
timeout           = int(lines[21]); println("timeout=" + lines[21]);
minimumMatches    = int(lines[23]); println("minimumMatches=" + lines[23]);
capWidth          = int(lines[25]); println("capWidth=" + lines[25]);
capHeight         = int(lines[27]); println("capHeight=" + lines[27]);
    //size( 320, 240 );
    size( capWidth, capHeight );            // sketch window matches the capture size
    fontp = loadFont("CourierNew36.vlw");   // font used for status overlays in draw()/twit()
    opencv = new OpenCV( this );
    opencv.capture( width, height, camera );                   // open video stream
    opencv.cascade( OpenCV.CASCADE_FRONTALFACE_ALT2 );  // load detection description, here-> front face detection : "haarcascade_frontalface_alt.xml"
    twitterer = new ImageToTwitter(this);   // twitpic4p uploader used by twit()

// Processing shutdown hook. NOTE(review): the body (and closing brace) is
// missing from this copy -- the scrape dropped it. It presumably released
// the OpenCV capture; recover the original before reuse.
public void stop() {

// Per-frame loop: adjust the camera image, run Haar face detection, draw the
// frame plus detection rectangles, and (in the lost tail of this function)
// hand a snapshot on to handleFace(). NOTE(review): scrape-damaged -- the
// actual frame-grab call (e.g. opencv.read()) referenced by the comment
// below is missing, loops/braces are unclosed, and the snapshot taken at
// the end grabs the full frame rather than the computed face rectangle.
void draw() {
    // grab a new frame
    // and convert to gray;
//    opencv.convert( GRAY );
    opencv.contrast( contrast_value );       // apply cfg contrast to the current frame
    opencv.brightness( brightness_value );   // apply cfg brightness

    // proceed detection
    // scale 1.2, min 2 neighbours, Canny pruning, minimum face size 40x40
    Rectangle[] faces = opencv.detect( 1.2, 2, OpenCV.HAAR_DO_CANNY_PRUNING, 40, 40 );
    if(faces.length == 0)
      recentMatches = 0;                     // a faceless frame resets the sustained-match counter

    // display the image
    image( opencv.image(), 0, 0 );
textFont(fontp, 12);
    // draw face area(s)
      for( int i=0; i<faces.length; i++ ) {
         rect( faces[i].x, faces[i].y, faces[i].width, faces[i].height ); 
          // second rectangle: the face area padded by `margin` on every side
          rect( faces[i].x-margin, faces[i].y-margin, faces[i].width+margin*2, faces[i].height+margin*2 ); 
//    stroke(0,255,0);
    // build a margin-padded crop rectangle clamped to the frame bounds
    for( int i=0; i<faces.length; i++ ) {
      Rectangle face = new Rectangle(max(faces[i].x-margin, 0),
                                     max(faces[i].y-margin, 0),
                                     min(faces[i].width+margin*2, width-faces[i].x+margin-1), 
                                     min(faces[i].height+margin*2, height-faces[i].y+margin-1));
 //      rect(face.x, face.y, face.width, face.height);
 //       PImage snap = get(face.x, face.y, face.width, face.height);
        // NOTE(review): grabs the WHOLE frame, not `face` -- contradicts the
        // cropping described in the prose above; likely debug state or scrape loss
        PImage snap = get(0,0,width, height);

// Gatekeeper between detection and upload: only proceeds when at least
// `timeout` ms have passed since the last upload AND the face has been seen
// for `minimumMatches` consecutive frames. NOTE(review): the body after the
// condition (presumably a call to twit(snap)) is missing from this copy.
void handleFace(PImage snap){
  if(System.currentTimeMillis() > lastTwit+timeout
     && recentMatches >= minimumMatches){

// Uploads the snapshot via twitpic4p when `transmit` is on; otherwise draws
// an on-screen notice. Records the upload time for rate limiting either way.
// NOTE(review): line below starting "{," lost its method-call target in the
// scrape -- presumably twitterer.post(...) or similar taking the caption,
// credentials, tag and PNG bytes. Recover the original before reuse.
void twit(PImage img){
      if (transmit==true)
      {, "shitface", twituser, twitpass, twittag, true, getBytes(img));
       if (transmit==false)
      {text(("internet off - no tweet"),20,60);}     
      lastTwit = System.currentTimeMillis();

// Encodes a PImage as PNG bytes for the twitpic upload; returns an empty
// array if encoding fails. NOTE(review): the `try {` opening the block that
// the visible `}catch` closes is missing -- lost in the scrape.
byte[] getBytes(PImage img){
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ImageIO.write((BufferedImage)img.getImage(), "png", baos);  // PImage -> AWT image -> PNG stream
  }catch(Exception e){
        return new byte[0]; // Problem
  return baos.toByteArray(); 

void save(PImage img){