kadoin-FaceOSC

robogif

Behind all that metal and code might there really be… a soul?

What’s happening?

  • Eyes light up as eyebrows are raised
  • Mouth shows the amplitude of the song over time
  • Music gets louder as mouth is opened almost like you/the robot is singing it

Melodramatic musings aside, I had a lot of fun with this project. It sort of just started with playing around with the sample code. I didn’t have a strong concept of what I wanted to do and it was my first time playing around with 3D so I didn’t really want to get into fancy shapes, but I could make a box move around in a space, and what do boxes look like? Robots. And what kind of space could this robot be in? Outer space. Nothing I was going to make would look super realistic, so the cartooniness of all the simple shapes worked well.

I really liked the new Star Trek movie that came out this summer, it was very reminiscent of the original series. I worked at a movie theater so I got to keep the poster, but I digress.  A Beastie Boys song blew up a fleet of robot ships, which was actually very fun to watch.

So I figured Intergalactic by the Beastie Boys would be a nice touch while also giving the mouth something to do.

Add a bit of interaction, map a few things, and bada bing bada boom, I got a fun space robot. It’s not the most conceptual thing maybe not even the most original, but it was really fun to make and I really like how it turned out. It’s pretty fun.

Since I can upload video for FaceOSC to analyze, I was thinking about making a music video type thing by putting different Star Trek clips through the program, but that would probably include a decent amount of editing to make it look nice, which I didn’t have time for. It’s still something I think I’ll do eventually, though. Maybe I’ll experiment more with light sources while I’m at it.

 

// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230

//also Golan's 'face controlled box' sample code towards the bottom of the assignment page was pretty helpful
//./deliverables/deliverables-05/


import processing.sound.*;
SoundFile file;
Amplitude amp;
import oscP5.*;
OscP5 oscP5;

Star[] stars = new Star[100]; // warp-speed starfield backdrop


int found; // global variable, indicates if a face is found

float poseScale;
PVector poseOrientation = new PVector(); // stores an (x,y,z)
PVector posePosition = new PVector();

float eyebrowLeft;
float eyebrowRight;
float mouthHeight;

int isPlaying = 0; // 0 until the song is started on first face detection
// Rolling buffer of recent amplitude readings, newest at index 0; draw()
// renders it as the mouth equalizer. Typed as ArrayList<Float> — the raw
// ArrayList made get() return Object, which cannot be used in the
// arithmetic `amps.get(i+14)*10` in draw().
ArrayList<Float> amps = new ArrayList<Float>();
float curAmp = 0; // most recent amplitude sample
//----------------------------------
//----------------------------------
// Open the FaceOSC listener, load the song, and seed the star field and
// the mouth-equalizer amplitude buffer.
void setup() {
  size(640, 480, P3D);

  oscP5 = new OscP5(this, 8338);
  // Route each FaceOSC address to its handler method below.
  String[][] bindings = {
    {"found", "/found"},
    {"poseScale", "/pose/scale"},
    {"poseOrientation", "/pose/orientation"},
    {"posePosition", "/pose/position"},
    {"eyebrowLeftReceived", "/gesture/eyebrow/left"},
    {"eyebrowRightReceived", "/gesture/eyebrow/right"},
    {"mouthHeightReceived", "/gesture/mouth/height"},
  };
  for (String[] b : bindings) {
    oscP5.plug(this, b[0], b[1]);
  }

  file = new SoundFile(this, "intergalactic1.wav");
  amp = new Amplitude(this);

  for (int s = 0; s < stars.length; s++) {
    stars[s] = new Star();
  }

  // Fill the 28-slot equalizer buffer with silence.
  for (int k = 0; k < 28; k++) {
    amps.add(0, 0.0);
  }
}

//----------------------------------
//----------------------------------
// Renders the star field every frame; once a face is found, starts the
// song and draws the robot head, posed and scaled to follow the face.
// The nested translate pairs each undo themselves so later geometry is
// positioned relative to the head center.
void draw() {

  int millis = millis(); // local shadows millis(); frozen for this frame
  background (0);
  noStroke();
  lights();




  // Star field backdrop (independent of face tracking).
  for (int i =0; i <stars.length; i++) {
    stars[i].update();
    stars[i].show();
  }



  if (found != 0) {
    // Start the song once, on the first frame a face appears.
    if (isPlaying == 0) {
      file.loop();
      amp.input(file);
      isPlaying = 1;
    }
    // Push the newest amplitude sample into the front of the rolling
    // buffer; drop the oldest so the size stays fixed.
    curAmp = amp.analyze();
    amps.add(0, curAmp);
    amps.remove(amps.size()-1);

    // Eyebrow height (FaceOSC units, roughly 7.5–9) -> eye glow 0..1.
    float eyeBrightL = map(eyebrowLeft, 7.5, 9, 0, 1);
    float eyeBrightR = map(eyebrowRight, 7.5, 9, 0, 1);

    // Sawtooth 0..0.85 repeating every 566 ms — drives the blinking
    // antenna beacon.
    float beacon = map(millis%566, 50, 565, 0, 0.85);

    // Mouth opening controls song volume (0.1..1).
    float mouthAmp = map(mouthHeight, 2, 11, .1, 1);
    file.amp(mouthAmp);


    pushMatrix(); 

    // Follow the tracked head pose.
    translate(posePosition.x, posePosition.y, posePosition.z);
    rotateY (0 - poseOrientation.y); 
    rotateX (0 - poseOrientation.x); 
    rotateZ ( poseOrientation.z); 


    scale(poseScale, poseScale, poseScale);


    //eye lights
    // NOTE(review): this falloff is replaced by the next call before any
    // light is created, so it appears to have no effect — confirm.
    lightFalloff(0.01, 0.0005, 0.00075);

    //antenna light
    lightFalloff(1, 0.001, 0.0001);
    pointLight(255-255*beacon, 0, 0, 0, -45, 0); // red beacon above head

    fill(200, 200, 250);
    box(40, 40, 40);//head

    translate(0, -20, 0); // move to top of head


    fill(150, 150, 200);
    box(10, 5, 10); //top peg
    fill(150, 150, 200);
    box(3, 25, 3);//antenna
    translate(0, -15, 0);

    // Beacon bulb color fades with the same sawtooth as the point light.
    fill(255-255*beacon, 50-50*beacon, 50-50*beacon);
    sphere(4); //beep boop
    translate(0, 15, 0); // undo antenna-tip offset

    fill(150, 150, 200);
    translate(0, 20, 0); // back to head center
    translate(-20, 0, 0);
    box(10, 20, 20);//left peg 1
    translate(20, 0, 0);

    translate(20, 0, 0);
    box(10, 20, 20);//right peg 1
    translate(-20, 0, 0);

    // Eyes: each is a sphere plus a yellow point light whose brightness
    // tracks the matching eyebrow.
    fill(255, 255, 255);
    translate(-8, -8, 18);
    pointLight(255*eyeBrightL, 240*eyeBrightL, 0, -8, 0, 30);
    sphere(6);//left eye

    translate(8, 8, -18); // undo left-eye offset

    translate(8, -8, 18);
    pointLight(255*eyeBrightR, 240*eyeBrightR, 0, 8, 0, 30);
    sphere(6);//right eye
    translate(-8, 8, -18);

    // Reset lighting so the mouth equalizer isn't tinted by the
    // eye/beacon point lights.
    noLights();
    lights();

    translate(0, 8, 20);
    fill(150, 150, 200);
    box(30, 10, 5);//mouth
    fill(0);
    box(28, 8, 5.01); // slightly proud of the frame to avoid z-fighting
    pushMatrix();
    // One 1-unit-wide bar per buffered amplitude sample, left to right.
    for (int i =  -14; i<14; i++) {
      float h = amps.get(i+14)*10;//*mouthAmp;
      translate(i+0.5, 0, 2.52);
      fill(0, 0, 255);
      box(1, h, .1);
      translate(-i-0.5, 0, -2.52);
    }
    popMatrix();
    translate(0, -8, -20);

    popMatrix();
  }
}


//----------------------------------
// Event handlers for receiving FaceOSC data
// Plugged by setup(): nonzero while FaceOSC is tracking a face.
public void found (int i) { 
  found = i;
}

// Overall face scale (distance proxy) from /pose/scale.
public void poseScale(float s) {
  poseScale = s;
}

// Head rotation in radians from /pose/orientation.
public void poseOrientation(float x, float y, float z) {
  poseOrientation.set(x, y, z);
}

// Screen-space head position from /pose/position (z fixed at 0).
public void posePosition(float x, float y) {
  posePosition.set(x, y, 0);
}

// Eyebrow raise amounts; draw() maps these to eye-light brightness.
public void eyebrowLeftReceived(float f) {
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  eyebrowRight = f;
}

// Mouth opening; draw() maps this to song volume.
public void mouthHeightReceived(float h) {
  mouthHeight = h;
}


//Dan Shiffman had a cool video about how to make a warpspeed-like star field
//I thought it'd be a pretty sweet setting for my little bot
//https://www.youtube.com/watch?v=17WoOqgXsRM

// One star of the warp-speed field (after Dan Shiffman's starfield
// tutorial, linked above). Position is centered on the screen middle;
// z is depth, shrinking toward the viewer each frame.
class Star {
  float x;
  float y;
  float z;

  Star() {
    scatter();
    z = random(0, width);
  }

  // Pick a fresh random position in the screen plane.
  void scatter() {
    x = random(-width/2, width/2);
    y = random(-height/2, height/2);
  }

  // Fly 15 units toward the camera; once past it, respawn at the far
  // plane in a new spot.
  void update() {
    z -= 15;
    if (z<1) {
      z=width;
      scatter();
    }
  }

  // Perspective-project (x, y, z) and draw the star, growing it as it
  // gets closer.
  void show() {
    fill(255);
    float px = map(x/z, 0, 1, 0, width);
    float py = map(y/z, 0, 1, 0, height);
    pushMatrix();
    translate(width/2, height/2, -50);

    float size = map(z, 0, width, 16, 0);
    ellipse(px, py, size, size);
    popMatrix();
  }
}
Written by Comments Off on kadoin-FaceOSC Posted in FaceOSC

arialy-faceosc

faceoscsketch

When thinking about a concept for the face tracker, I was more interested in how the tracker would control something other than a face. Rather than use the movements of the face to control something, I wanted to control something with the lack of movement.

arialyfacegif2

import processing.sound.*;
SoundFile file;   // ambient track, started when the face holds still
Amplitude amp;    // RMS analyzer driving the particle sizes
int dim;          // NOTE(review): assigned in setup() but never read — confirm
int stillEnough = 0; // NOTE(review): never used — confirm before removing
float vol = 0;       // current music volume, faded in/out in draw()
float thinness = 0;  // particle alpha, fades in while the face is still
float moonX = 525;   // moon x position, drifts with head movement

Particle[] particleList = new Particle[50];
int count = 0;
// Particle diameter driven by the music's RMS level.
// NOTE(review): map() is called here in a field initializer, before
// setup() runs — works in Processing but evaluates to a constant (10).
float globalRMS = map(.15, 0, .3, 0, 20);


//
// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//
import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();


void setup() {
  dim = width/2;
  size(640, 480);
  frameRate(30);
  for (int o = 0; o<particleList.length; o++) { particleList[o] = new Particle(); } // Load a soundfile from the /data folder of the sketch and play it back file = new SoundFile(this, "portTown.mp3"); amp = new Amplitude(this); oscP5 = new OscP5(this, 8338); oscP5.plug(this, "found", "/found"); oscP5.plug(this, "poseScale", "/pose/scale"); oscP5.plug(this, "posePosition", "/pose/position"); oscP5.plug(this, "poseOrientation", "/pose/orientation"); } class Particle { PVector position; PVector velocity; float offset = random(-7, 1); Particle() { position = new PVector(width/2, height/2); velocity = new PVector(1 * random(-1, 1), -1 * random(-1, 1)); } void update() { // Add the current speed to the position. position.add(velocity); if (position.x > width) {
      position.x = 0;
    }
    if (position.x < 0) { position.x = width; } if ((position.y > height) || (position.y < 0)) { velocity.y = velocity.y * -1; } } void display() { // Display circle at x position //stroke(200); fill(255, 255, 224, thinness); ellipse(position.x, position.y, globalRMS+offset, globalRMS+offset); ellipse(position.x, position.y, (globalRMS+offset)*.5, (globalRMS+offset)*.5); } } void drawGradient(float x, float y, float r, float g, float b) { for (float ra = x*.75; ra > 0; ra -= 3) {
    fill(r, g, b);
    ellipse(x, y, width*1.5, ra);
    r = (r - 2) % 360;
    b = (b-.5) % 360;
    g = (g -2)%360;
  }
}


// Returns 1 while the tracked face sits within +/-50 px of the screen's
// horizontal center ("holding still"), 0 otherwise.
int still() {
  float mid = width/2;
  boolean centered = posePosition.x > mid - 50 && posePosition.x < mid + 50;
  return centered ? 1 : 0;
}

// Night scene: moon, water gradient, and music-reactive fireflies.
// Holding the face still at center fades the fireflies in and swells the
// music; moving left/right pans the scene and fades them out.
void draw() {
  background(0);

  // Crescent moon: white disc with an offset black disc punched out.
  fill(255);
  ellipse(moonX, 75, 70, 70);
  fill(0);
  ellipse(moonX + 15, 75, 50, 50);

  // Ground band and glowing water.
  rectMode(CORNERS);
  noStroke();
  fill(0, 100, 0, 200);
  rect(0, height/1.4, width, height);
  fill(0, 191, 255);
  drawGradient(width/2, height*.99, 0, 191, 255);

  //ellipse(width/2, height * .92, width *1.5, height/3);

  if (still() == 1) {
    // Face is holding still: fade fireflies in, ramp music up.
    if (thinness < 150) {
      thinness += 10;
    }
    println("hi");
    if (vol == 0) {
      file.play();
      amp.input(file);
    }
    if (vol < 1) {
      vol += .001;
      file.amp(vol);
    }
    float rms = amp.analyze();
    float mapRMS = map(rms, 0, .3, 10, 25);
    globalRMS = mapRMS; // particle size pulses with the music
  } else {
    // Face is moving: fade the music back out.
    if (vol > 0) {
      vol -= .01;
      file.amp(vol);
    }
  }

  // Head moved right: pan everything left and fade fireflies.
  if (posePosition.x > width/2 + 50) {
    for (int i = 0; i < particleList.length; i++) {
      if (particleList[i] != null) {
        particleList[i].position.x -= 2;
      }
    }
    if (thinness > 0) {
      thinness -= 10;
    }
    moonX -= 1;
  }

  // Head moved left: pan everything right and fade fireflies.
  if (posePosition.x < width/2 - 50) {
    for (int i = 0; i < particleList.length; i++) {
      if (particleList[i] != null) {
        particleList[i].position.x += 2;
      }
    }
    if (thinness > 0) {
      thinness -= 10;
    }
    moonX += 1;
  }

  for (int i = 0; i < particleList.length; i++) {
    if (particleList[i] != null) {
      particleList[i].update();
      particleList[i].display();
    }
    // NOTE: drawn once per loop iteration, so the faint face marker's
    // alpha stacks up over the 50 passes (kept as in the original).
    if (found > 0) {
      //translate(posePosition.x, posePosition.y);
      //scale(poseScale);
      noFill();
      rectMode(CENTER);
      fill(255, 255, 255, 20);
      ellipse(posePosition.x, posePosition.y, 16, 16);
    }
  }
}

// OSC CALLBACK FUNCTIONS

// Plugged by setup(): nonzero while FaceOSC is tracking a face.
public void found(int i) {
  //println("found: " + i);
  found = i;
}

// Overall face scale (distance proxy) from /pose/scale.
public void poseScale(float s) {
  //println("scale: " + s);
  poseScale = s;
}

// Screen-space head position; draw() compares x to the screen center.
public void posePosition(float x, float y) {
  //println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

// Head rotation in radians from /pose/orientation.
public void poseOrientation(float x, float y, float z) {
  //println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}
// all other OSC messages end up here
void oscEvent(OscMessage m) {
  if (m.isPlugged() == false) {
    //println("UNPLUGGED: " + m);
  }
}
Written by Comments Off on arialy-faceosc Posted in FaceOSC

takos-lookingoutwards5

lo5

Robert Yang – Intimate, Infinite
For my looking outwards I looked into the work Intimate, Infinite by Robert Yang. Intimate Infinite is a 3d first person game based on the  novel “The Garden of Forking Paths” By Jorge Luis Borges.  I read about the story after I played the game, and It made it more interesting because the narrative is abstract, so I wasn’t able to fully understand the work the first time I played it. It was interesting and aesthetically pleasing but the content came across as confusing. The story is about a Chinese professor of English who is a spy for the germans, and he is found out and murdered. I think that the game expects the player to be more knowledgeable about the source story, but that this was not made clear – Unless the game is intended to be abstracted, which it may be because it lends itself well to this through the lagged video feed, the different points of views, and the changing narrator, as well as the discussion of different lives and different timeslines

 

cambu-faceosc

Humans look at real faces in the real world every day. But since the advent of smartphone technology, people have been spending increasing amounts of time looking at phone screens while out in public, even around other ‘real’ people. This “issue” has been the subject of a series of artist investigations and conversations within popular culture. I’ve found many of these pieces contain truth, but often whitewash or don’t delve into the actual reason(s) we’re so interested in looking at our phones. I was interested in tackling that situation in my piece.

peoplenottogether

PROCESS

processfaceosc

THE PIPELINE

To enable the live capturing of my iPhone Screen, I constructed a multi-piece graphics pipeline. The structure is as follows:

desktoparea

The below work is an interesting piece of ‘selective focus’ that Golan pointed out to me when I showed him an iteration of project. The work is an entire year of New York Times covers where everything except people’s faces are blacked out.
theepic

Full Code Embed

// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker

// further adapted by Marisa Lu
// adapted by Kaleb Crawford 
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art

// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230

import oscP5.*;
OscP5 oscP5;

import processing.video.*;
import gab.opencv.*;
import java.awt.Rectangle;

Capture cam;   // live webcam feed (see liveOrNot)
//Movie cam;
// num faces found
int found;
float[] rawArray; // flat [x0, y0, x1, y1, ...] landmark coords from /raw

//which point is selected
int highlighted;

int liveOrNot = 1 ; //0 for recorded video, 1 for live
int shiftVal = 465; // x offset applied to landmarks in recorded-video mode
// NOTE(review): these initializers run before setup()/size(), so width
// and height do not yet hold the canvas dimensions here — confirm; both
// values are only used by code that is currently commented out.
int xOffset = round(0.1*width);
int yOffset = round(0.1*height);


// Opens the FaceOSC listener and starts the first available webcam;
// exits immediately if no camera is present.
void setup() {
  size(1280, 730);
  frameRate(60);

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "rawData", "/raw");

  String[] cameras = Capture.list();
  if (cameras.length == 0) {
    //println("There are no cameras available for capture.");
    exit();
  } else {
    for (int i = 0; i < cameras.length; i++) {
      //println(cameras[i]);
    }
    cam = new Capture(this, 1024, 576, cameras[0]);
    cam.start();
  }
}

//void keyPressed() {
// if (keyCode == RIGHT) {
// highlighted = (highlighted + 2) % rawArray.length;
// }
// if (keyCode == LEFT) {
// highlighted = (highlighted - 2) % rawArray.length;
// if (highlighted < 0) {
// highlighted = rawArray.length-1;
// }
// }
//}
// Plots the raw FaceOSC landmarks as tiny dots. A nearly-transparent
// white rect is layered over the whole canvas each frame instead of a
// clear, so old dots fade out slowly as trails.
void draw() {
 //background(255, 255,255, 50);
 fill(255, 255, 255, 7);
 int border = 5;
 strokeWeight(border);
 rect(border, border, width-border*2, height-border*2);
 int timeNowSinceStart = millis() % 2555;
 float curColMod = map(timeNowSinceStart, 0, 2555, 0, 255);
 //drawPhoneFrame();

 //if (cam.available() == true) {
 // cam.read();
 //}
 //set(xOffset, yOffset, cam);

 if (found > 0) {
 // rawArray is [x, y, x, y, ...]; step by 2 to take one point per pass.
 for (int val = 0; val < rawArray.length -1; val+=2) {
 //function that changes stroke color if it been more than X time since last called
 chnageColIfBreak(curColMod);
 ellipse(rawArray[val], rawArray[val+1], 1, 1);
 }
 }
}

// Draws a stylized smartphone outline centered horizontally: body,
// screen, earpiece slot, and round home button.
void drawPhoneFrame() {
  strokeWeight(1);
  int bodyW = 345;
  int bodyH = 675;
  int screenW = 315;
  int screenH = 570;

  stroke(0, 0, 0);

  float cx = width/2;
  rect(cx - bodyW*0.5, 45-15, bodyW, bodyH, 45);              // phone frame
  rect(cx - 0.5*screenW, 45+15+15, screenW, screenH, 15);     // phone screen
  rect(cx - 0.5*100, 45, 100, 15, 5);                         // earpiece
  ellipse(cx, 675, 35, 35);                                   // home
}

float currentMilVal = 0; // millis() at the current call
float prevMilVal = 0;    // millis() at the previous call
// Debounce window in milliseconds between stroke-colour changes.
// NOTE(review): the original comment claimed "1000 milliseconds or one
// second" but the value is 285 — confirm which is intended.
float someVal = 285;
int faceIncre; // counts distinct face re-locks; cycles the colour

// Cycles the stroke colour (red/green/blue families) each time there has
// been a gap of more than someVal ms since the previous call — treated
// as FaceOSC re-locking onto a new face.
// NOTE(review): name is misspelled ("chnage") but renaming would break
// the call in draw(); fix both together. Also, this is called once per
// landmark inside draw()'s loop, so prevMilVal updates many times per
// frame — confirm the gap check behaves as intended.
void chnageColIfBreak(float curColMod) {
 currentMilVal = millis();
 if (currentMilVal - prevMilVal < someVal) {
 //the time between OSC face grabs has not been long enough to change the colour
 // aka, it just relocked a face, didn't switch to a new face
 } else {
 faceIncre++;
 int curSelect = faceIncre % 3;
 if (curSelect == 1) { // RED
 drawPhoneFrame();
 stroke(17, 45, 200 * (millis()%1000)/100);
 //stroke(curColMod*1.2, curColMod*0.8, curColMod*0.5);
 //println(curColMod*1.2 + "//" + curColMod + "//" + curColMod);
 } else if (curSelect == 2) { // GREEN
 drawPhoneFrame();
 stroke(32, 165, 50 * (millis()%1000)/100);
 //stroke(curColMod*0.1, curColMod*1.2, curColMod*0.3);
 } else { // curSelect == 3, in this case BLUE 
 drawPhoneFrame();
 stroke(120, 78, 245 * (millis()%1000)/100);
 //stroke(curColMod/8, curColMod/2, curColMod*1.65);
 }
 //println(faceIncre + " " + curSelect);
 }

 prevMilVal = currentMilVal;
}
/////////////////////////////////// OSC CALLBACK FUNCTIONS//////////////////////////////////

// Plugged by setup(): nonzero while FaceOSC is tracking a face.
public void found(int i) {
  this.found = i;
}

// Plugged by setup(): stores the flat [x, y, x, y, ...] landmark array
// from /raw. In recorded-video mode (liveOrNot == 0) every x coordinate
// is shifted right by shiftVal to line up with the video pane.
public void rawData(float[] raw) {
  rawArray = raw;

  if (liveOrNot == 0) {
    for (int x = 0; x < rawArray.length; x += 2) {
      rawArray[x] += shiftVal;
    }
  }
}
Written by Comments Off on cambu-faceosc Posted in FaceOSC

Jaqaur – LookingOutwards05

https://www.metavision.com/

I am writing about the presentation at Weird Reality that had the greatest impact on me: Meta. Meta is a company that works in augmented reality, and they strive to create an interface that is natural and gestureless. For example, one could select something by picking it up rather than by pointing your face at it and awkwardly pinching the air (like you have to do with some other AR systems). What really blew my mind was how fast VR/AR technology is advancing in ways I didn’t know about. For example, Meta currently has a working (if somewhat rough and not totally safe) way to let users feel virtual items just by touching them with their naked hand. And they (“they” not necessarily meaning Meta, but rather the ambiguous VR/AR scientists that develop this stuff) have wearable technology that can read the brain waves of a person and determine from that alone what the person is looking at. Similarly, they (same ambiguous they) can transmit an image (or at least a few colors) directly to a person’s brain, and it will be as if that person is seeing it with his or her eyes. Like, what?! That’s crazy! And once it develops further, it could have huge applications in medicine and psychology, not just entertainment. The presenter said that by 2030, he thinks we will have transcended AR goggles to the point that most people just wear devices that put the AR into their brains. That would be a huge advancement from the AR goggles they have now, which are clunky and a bit awkward. All in all, Weird Reality was a great experience, but Meta’s presentation in particular really reminded me just how FREAKING AWESOME this technology could be.

Check out a video of theirs (and this is from three years ago):

Keali-FaceOSC

dreamscapeoscgif

screenshot-2016-10-15-12-59-33
screenshot-2016-10-15-13-00-23

Rather than experimenting with a concept that was instinctively face-controlled, such as objects inherently representing faces, or masks/robots/other images, I wanted to explore something more abstract and relatable to my own aesthetics: that translated into more introspective, tranquil, and atmospheric ideas on which I base a lot of my interactive works. I almost automatically thought of skies/seas, gearing towards the starry path of galaxies and stardust in which someone can lose him or herself.

As such came the daydreaming concept of my project, being reliant on the openness of one’s eyes; I would allow a dreamscape of night stars light specks to fade in if the user’s eyes are closed or lower than/default. However, if the user is to open his or her eyes wider to a noticeable threshold, the dreamscape would snap back to a blank reality abruptly, as if one were suddenly woken up from a daydream by a third party. I also wanted to toy with the juxtaposition of having visuals that one ironically wouldn’t be able to see if his or her eye’s were opened (haha)–afterwards I just changed this to a daydream, rather than actual sleeping/eyes had to be fully closed, concept–for practical reasons; I also feel like this didn’t stray too far from my intrinsic intentions of “being” in another world.

The visuals this time hinged on the atmosphere of the dreamscape, dealt with through gradients, flickering/fading opacities, and the subtle movements of a new particle class that I learned. Rather than having a blank template or default face, I thought to translate the daydream aspect abstractly to the “awake” portion as well. At the default, “awake” phase of the simulation, I removed the traces of an avatar and instead designated two particle bursts of where the eyes would be–these minor explosions follow the user’s eyes and represent how the stardust from the dreamscape realm is almost itching to come to fruition, to burst out–indicating how difficult it is for someone to perhaps not daydream usually. Once someone blanks out or loses focus, or in this case lessen the openness of eyes, the dreamscape reappears and the stardust particles float effortlessly and arbitrarily throughout the screen. When the simulation is first run, the stardust of the dreamscape expand from a cluster in the middle, otherwise abstracted as the center of the user’s mind, and travel outwards to the boundaries, giving a sense of the user moving forward and falling deeper, traveling further into the dream; the dreamscape is otherwise decorated by flickering stars and a fading night gradient.

I also wanted to advance the dreamscape with calming, therapeutic music, and I did so by utilizing the Processing audio capabilities, modifying the amp() to be higher when the eyes are smaller/dreamscape is occurring, and the amp() be 0 and for the music to stop abruptly if the eyes were open/avatar was in “reality”. The audio I selected is the soundtrack Will of the Heart from the animated show Bleach.

Hence I wanted this to be a sort of introspective interaction between the person and him or herself: that people are fighting their own concentration and awareness to not accidentally fade in to a daydream–or perhaps purposely daydream and enjoy the experience. Other than that, it is an interaction between the user and the interface, to enjoy the visuals of an abstract, idyllic, and atmospheric setting.

faceoscsketches

**personal notes with troubleshooting/my own laptop in particular: I think it’s because my laptop is of a newer model with Windows, so its screen resolution is supposedly “crazy high”–that being, a lot of my software windows (i.e. Processing, Photoshop, CamStudio, OBS, etc.) turn up tiny because of their inherently smaller windows resolutions… as such when I run the Processing code, the output windows is just a small square 🙁 It’s unfortunate that I feel like the size takes away from fully being immersed in the visuals but it can’t be helped–I’ve checked online for problem-solving, and this has actually been tagged as a “high-priority” problem in Processing’s open-source GitHub forum, but no solution has been provided yet.. this project, especially when it got to the screengrab/video-recording portion, also really made me wish I had a Mac/used OSX haha.. finding a software to properly record the screen was hard–and then working with the software’s windows to properly click the Play/Stop button on my “high-res” laptop was even harder because–surprise–the windows for the software were tiny too. And then the outputted files were huge. (my first attempt outputted a file of 4.0 GB…) but–I just wanted to record some benchmarks for personal’s sake. This was quite a journey–and in the end I’m thankful I got what I needed to get done, done.

GitHub repository//

//
// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//

//modified 10.14.2016 for faceosc project 
//dreamscape by kearnie lin
import oscP5.*;
OscP5 oscP5;
import java.util.concurrent.ThreadLocalRandom;
import java.util.*;

import processing.sound.*; //for dreamscape audio
SoundFile file;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture values received from FaceOSC (see callbacks at the bottom)
float mouthHeight;
float mouthWidth;
float eyeLeft;   // eye openness; draw() treats < 3.0 as closed
float eyeRight;
float eyebrowLeft;  // draw() treats < 7.8 as lowered/closed
float eyebrowRight;
float jaw;
float nostrils;

// Constants for setGradient()'s axis parameter
int Y_AXIS = 1;
int X_AXIS = 2;
color b1, b2, c1, c2; // gradient endpoint colours (c1/c2 set in setup)
int dim;

ParticleSystem ps;  // burst at the right eye while "awake"
ParticleSystem ps2; // burst at the left eye while "awake"
Dust[] dustList = new Dust [60]; // drifting stardust for the dreamscape
// Dust size driven by a fixed map() — evaluates to 15.
// NOTE(review): runs in a field initializer, before setup() — works in
// Processing but is a constant, not a live RMS value.
float gMove = map(.15,0,.3,0,30); //thank you ari!

void setup() {
  size(640, 640);
  frameRate(30);
  c1 = color(17,24,51);
  c2 = color(24,55,112);
  ps = new ParticleSystem(new PVector(eyeRight-25,-10));
  ps2 = new ParticleSystem(new PVector(eyeLeft+25, -10));
  for (int i = 0; i < dustList.length; i++) { dustList[i] = new Dust(); } file = new SoundFile(this, "faceoscmusic.mp3"); file.loop(); file.amp(0); //play audio when avatar is awake oscP5 = new OscP5(this, 8338); oscP5.plug(this, "found", "/found"); oscP5.plug(this, "poseScale", "/pose/scale"); oscP5.plug(this, "posePosition", "/pose/position"); oscP5.plug(this, "poseOrientation", "/pose/orientation"); oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width"); oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height"); oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left"); oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right"); oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left"); oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right"); oscP5.plug(this, "jawReceived", "/gesture/jaw"); oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils"); } float eyesClosedValue = 0; void draw() { background(255); stroke(0); boolean eyesClosed = false; if(found > 0) {
    pushMatrix();
    translate(posePosition.x, posePosition.y);
    scale(poseScale);
    noFill();
    if (eyeLeft < 3.0 || eyeRight < 3.0 || eyebrowLeft < 7.8 || eyebrowRight < 7.8) { eyesClosed = true; } print(eyeLeft); //debugging (finding threshold vals) print(eyeRight); if (eyesClosed == false) { ps.addParticle(); ps.run(); ps2.addParticle(); ps2.run(); } popMatrix(); } if (eyesClosed) { file.amp(eyesClosedValue/255.0); c1 = color(17,24,51,eyesClosedValue); c2 = color(24,55,112,eyesClosedValue); eyesClosedValue += 3; if (eyesClosedValue > 255) eyesClosedValue = 255;
    //gradient
    setGradient(0, 0, width, height, c1, c2, Y_AXIS);
    Random ran = new Random(50);
    //implement stars

    for (int i = 0; i < 60; i++) {
      noStroke();
      int[] r = {230,235,242,250,255};
      int[] g = {228,234,242,250,255};
      int[] b = {147,175,208,240,255};
      int starA = (int)(min(ran.nextInt(100),eyesClosedValue) + sin((frameCount+ran.nextInt(100))/20.0)*40);
      fill(r[(ran.nextInt(5))],
           g[(ran.nextInt(5))],
           b[ran.nextInt(5)], starA);
      pushMatrix();
      
      translate(Float.valueOf(String.valueOf(width*ran.nextFloat())), Float.valueOf(String.valueOf(height*ran.nextFloat())));
      rotate(frameCount / -100.0);
      float r1 = 2 + (ran.nextFloat()*4);
      float r2 = 2.0 * r1;
      star(0, 0, r1, r2, 5); 
      popMatrix();
    }
   for (int j = 0; j < dustList.length; j++) { dustList[j].update(); dustList[j].display(); } } else { eyesClosedValue = 0; file.amp(0); } } class Dust { PVector position; PVector velocity; float move = random(-7,1); Dust() { position = new PVector(width/2,height/2); velocity = new PVector(1 * random(-1,1), -1 * random(-1,1)); } void update() { position.add(velocity); if (position.x > width) { position.x = 0; }
    if ((position.y > height) || (position.y < 0)) {
      velocity.y = velocity.y * -1;
    }
  }
  void display() {
    fill(255,255,212,100);
    ellipse(position.x,position.y,gMove+move, gMove+move);
    ellipse(position.x,position.y,(gMove+move)*0.5,(gMove+move)*0.5);
  }
}

// Manages a pool of short-lived Particles emitted from a fixed origin.
class ParticleSystem {
  // Typed list: the original raw ArrayList made get() return Object,
  // which cannot be assigned to Particle without a cast.
  ArrayList<Particle> particles;
  PVector origin; // emission point (copied, so the caller's vector is safe)

  ParticleSystem(PVector location) {
    origin = location.copy();
    particles = new ArrayList<Particle>();
  }

  // Spawn one new particle at the origin.
  void addParticle() {
    particles.add(new Particle(origin));
  }

  // Advance and draw every particle; iterate backwards so removing dead
  // particles doesn't skip elements.
  void run() {
    for (int i = particles.size()-1; i >= 0; i--) {
      Particle p = particles.get(i);
      p.run();
      if (p.isDead()) {
        particles.remove(i);
      }
    }
  }
}

// One glowing speck: drifts with light gravity and fades out over
// ~51 frames (lifespan 255, minus 5 per step).
class Particle {
  PVector location;
  PVector velocity;
  PVector acceleration;
  float lifespan; // remaining alpha; dead once below zero

  Particle(PVector l) {
    acceleration = new PVector(0,0.05);
    velocity = new PVector(random(-1,1),random(-2,0));
    location = l.copy();
    lifespan = 255.0;
  }

  // One simulation step: move, then draw.
  void run() {
    update();
    display();
  }

  // Integrate motion and burn down the remaining lifespan.
  void update() {
    velocity.add(acceleration);
    location.add(velocity);
    lifespan -= 5.0;
  }

  // Render as two concentric yellow dots whose alpha tracks lifespan.
  void display() {
    noStroke();
    fill(248,255,122,lifespan);
    ellipse(location.x,location.y,2,2);
    ellipse(location.x,location.y,2.5,2.5);
  }

  // True once the particle has fully faded.
  boolean isDead() {
    return lifespan < 0.0;
  }
}

//draw stars
// Draws an npoints-pointed star centered at (x, y), alternating vertices
// between the outer radius (radius2) and the inner radius (radius1).
void star(float x, float y, float radius1, float radius2, int npoints) {
  float step = TWO_PI / npoints;
  float half = step/2.0;
  beginShape();
  float a = 0;
  while (a < TWO_PI) {
    vertex(x + cos(a) * radius2, y + sin(a) * radius2);
    vertex(x + cos(a+half) * radius1, y + sin(a+half) * radius1);
    a += step;
  }
  endShape(CLOSE);
}

//draw gradient
// Fills the rect (x, y, w, h) with a vertical gradient from c1 (top) to
// c2 (bottom), drawn one horizontal scanline at a time. Only Y_AXIS is
// implemented; any other axis value draws nothing.
void setGradient(int x, int y, float w, float h, color c1, color c2, int axis ) {
  noFill(); // matches the original: cleared even when nothing is drawn
  if (axis != Y_AXIS) {
    return;
  }
  for (int row = y; row <= y+h; row++) {
    float t = map(row, y, y+h, 0, 1);
    stroke(lerpColor(c1, c2, t));
    line(x, row, x+w, row);
  }
}

// OSC CALLBACK FUNCTIONS

// Plugged by setup(): nonzero while FaceOSC is tracking a face.
// NOTE(review): every handler below logs on each OSC message, which is
// many times per frame — consider disabling the printlns.
public void found(int i) {
  println("found: " + i);
  found = i;
}

// Overall face scale (distance proxy) from /pose/scale.
public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

// Screen-space head position (z fixed at 0).
public void posePosition(float x, float y) {
  println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

// Head rotation in radians from /pose/orientation.
public void poseOrientation(float x, float y, float z) {
  println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  println("mouth height: " + h);
  mouthHeight = h;
}

// Eye openness; draw() compares these against the 3.0 threshold.
public void eyeLeftReceived(float f) {
  println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  println("eye right: " + f);
  eyeRight = f;
}

// Eyebrow height; draw() compares these against the 7.8 threshold.
public void eyebrowLeftReceived(float f) {
  println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  if(m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}
Written by Comments Off on Keali-FaceOSC Posted in FaceOSC

takos-faceosc

Gif:
crying-simulator-4crying-simulator-3

Video:

Process:
While working through hard assignments and preparing for midterms with friends, I often hear people say “I’m crying”, even when they aren’t. I wanted to make those dreams a reality. I wanted to create a crying simulator that reacts to your facial expressions and allows you to see a visual representation of your tears as you drown in them. I started with the raw data template that was available on the deliverables page, then I found a particle simulator on https://www.openprocessing.org/sketch/204560 by Peter Farrell, which I adapted to shoot particles out of your eyes when you are not smiling (it senses smiling by checking if your eyes are open and your mouth is not closed). When the face is in a resting position, it cries, and when it is in a screaming expression (closed eyes, open mouth), maximum tears are achieved. For the tears that are pooling up at the bottom of the screen, I used https://www.openprocessing.org/sketch/376964 by Ilyas Shafigin, and adapted it so that it reacts to the locations of each individual tear instead of the mouse click.

Sketch:
p_20161014_181532

Code:
//I took these initial comments out of the code part because they cause my embed to glitch on wordpress
// Processing 3.0x template for receiving raw points from
// Kyle McDonald’s FaceOSC v.1.1
// https://github.com/kylemcdonald/ofxFaceTracker
//
// Adapted by Kaleb Crawford, 2016, after:
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
// adapted from from Greg Borenstein’s 2011 example
// https://gist.github.com/1603230
//
//
// tears modified from code on openprocessing sketch/204560
// water from Ilyas Shafigin https://www.openprocessing.org/sketch/376964

import oscP5.*;
import java.util.Iterator;
int num = 1; //number of particles spawned per eye per frame
ArrayList particles; // live tear Particles (presumably ArrayList<Particle>; the generic parameter looks stripped by publishing — confirm)

int mode = 0; // toggled to 1 by the DOWN arrow in keyPressed()

OscP5 oscP5;
int found;       // nonzero while FaceOSC is tracking a face
int isSaved;
float[] rawArray; // interleaved x,y raw landmark coordinates from FaceOSC
int highlighted; //which point is selected
int totalTears;  // running count for the (commented-out) HUD text

float waterYValue; // pooled-water height, in pixels up from the bottom edge
//--------- 1D wave-simulation parameters (from Ilyas Shafigin's sketch)
int md = 3, area = 3, number, offset; // md: splash spread radius; number: column count (set in setup)
int w =2, hw = w / 2;                 // w: pixel width of one wave column
float lev = -30.0f, k1 = 0.06f, k2 = 0.09f; // rest level and spring constants
float damping = 0.96f;
float mouseAdd = 14.0f, md1 = md - 1.0f;
float[] value, speed; // per-column wave displacement and velocity
int fps = 60;
float dt = fps / 1000f;
//--------------------------------------------
//from openprocessing (sketch/204560, adapted)
// A single tear: a point with position, velocity, and an age. Gravity
// pulls it down each frame; when it nears the water surface it kicks the
// wave simulation's speed[] array to make a splash.
class Particle {
  float x; //x-position
  float y; //y-position
  float dx; //horizontal velocity
  float dy; //vertical velocity
  float lifespan; // counts UP from 0; the particle dies past 255

  //Constructor
  Particle(float _x, float _y, float _dx, float _dy) {
    x = _x;
    y = _y;
    dx = _dx;
    dy = _dy;
    lifespan = 0;
  }

  // Move the particle one frame (and draw it), splashing the water if
  // it has reached the surface.
  void update() {
    dy += 0.15; // gravity
    x += dx;    // update position by velocity
    y += dy;
    lifespan += 6;

    ellipse(x, y, 6, 6);

    // close to the water line? feed energy into the wave simulation
    if (abs(y - (height - waterYValue)) < 3) {
      int ix = int((x - hw) / w); // wave column under the particle
      float m = 3;                // splash strength
      // clamp to the wave array bounds; the original hard-coded 300,
      // which silently ignored the rightmost columns (number = width/w)
      if (ix < 0) {
        ix = 0;
      } else if (ix > number - 1) {
        ix = number - 1;
      }
      speed[ix] += m;

      // spread a smaller, hermite-falloff kick to neighboring columns
      for (int i = 1; i < md; i++) {
        int i2 = ix - i;
        if (i2 >= 0) speed[i2] += m * herm((md1 - i) / md1);
        i2 = ix + i;
        if (i2 < number) speed[i2] += m * herm((md1 - i) / md1);
      }
    }
  }

  // Dead once it has existed long enough (lifespan counts up here,
  // unlike the other sketches' particles which count down).
  boolean isDead() {
    return lifespan > 255.0;
  }
}


//--------------------------------------------
void setup() {
  // size() must be the first statement of a Processing 3 setup(): the
  // preprocessor hoists it into settings(), so drawing state set before
  // it (the original called textAlign first) applies to the default
  // 100x100 surface and is lost.
  size(640, 480);
  frameRate(30);
  textAlign(CENTER);

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found"); // plug() binds callbacks by method name
  oscP5.plug(this, "rawData", "/raw");

  particles = new ArrayList();

  smooth();

  // wave-simulation state: one column per w-pixel-wide slice of the window
  number = width / w;
  offset = height / 2;
  waterYValue = 0;

  value = new float[number];
  speed = new float[number];

  for (int i = 0; i < number; i++) {
    value[i] = speed[i] = 0.0f;
  }
}


//-------
void update() {
  for (int i = 0; i < number; i++) {
    float v = value[i];
    speed[i] -= k1 * (v - lev);

    for (int j = 1; j <= area; j++) {
      speed[i] += k2 * (value[max(i - j, 0)] + value[min(i + j, number - 1)] - 2 * v) / j;
    }
  }

  for (int i = 0; i < number; i++) { value[i] += speed[i] *= damping; } } //-------- float herm(float t) { return t * t * (3.0f - 2.0f * t); } //-------------------------------------------- boolean isSmiling() { if ( abs(rawArray[123]-rawArray[129]) > 6) {
    return true;
  }
  return false;
}
// Average of four rawArray entries, used to locate an eye center from the
// four landmark coordinates surrounding it (factored out of the four
// previously duplicated center*Eye* bodies).
int rawMean4(int a, int b, int c, int d) {
  return int((rawArray[a] + rawArray[b] + rawArray[c] + rawArray[d]) / 4);
}
int centerLeftEyeX() {
  return rawMean4(74, 76, 82, 80);
}
int centerLeftEyeY() {
  return rawMean4(75, 77, 83, 81);
}
int centerRightEyeX() {
  return rawMean4(86, 88, 94, 92);
}
int centerRightEyeY() {
  return rawMean4(87, 89, 95, 93);
}

//-----------------------
// Draw the face as line segments connecting consecutive raw FaceOSC
// landmarks (even index = x, odd index = y), skipping the indices where
// one facial feature's point run ends and the next begins, then
// hand-stitching the remaining feature outlines closed below.
void lineFace() {

  

  for (int i = 0; i<=128; i=i+2) {
    // the skipped indices are feature boundaries (end of jaw, brows,
    // nose, eyes, mouth runs) — connecting across them would draw
    // stray lines between unrelated features
    if (i !=32 && i !=52 && i != 42 && i != 70 && i!=82 && i!=94 && i!=118 && i!=124) {
      line(rawArray[i], rawArray[i+1], rawArray[i+2], rawArray[i+3]);
    }
  }
  // close the mouth, nose, and eye outlines back on themselves
  line(rawArray[96], rawArray[97], rawArray[120], rawArray[121]);
  line(rawArray[124], rawArray[125], rawArray[108], rawArray[109]);  
  line(rawArray[126], rawArray[127], rawArray[108], rawArray[109]);   
  line(rawArray[130], rawArray[131], rawArray[96], rawArray[97]);  
  line(rawArray[118], rawArray[119], rawArray[96], rawArray[97]);  
  line(rawArray[94], rawArray[95], rawArray[84], rawArray[85]); 
  line(rawArray[82], rawArray[83], rawArray[72], rawArray[73]);
  line(rawArray[70], rawArray[71], rawArray[54], rawArray[55]);
  line(rawArray[62], rawArray[63], rawArray[54], rawArray[55]);
  line(rawArray[60], rawArray[61], rawArray[70], rawArray[71]);
}






//-------------
// Emit tear particles from the eye centers based on expression, raise or
// lower the pooled water level, then step/draw/cull every live particle.
// Resting face: steady tears. Eyes shut (screaming): maximum tears.
// Genuinely smiling: the water drains.
void drawTears() {
  noStroke();
  if (isSmiling() == false) {
    // resting face: steady crying
    waterYValue += 1;
    totalTears += 1;
    num = 2;
    for (int i = 0; i < num; i++) {
      particles.add(new Particle(centerRightEyeX(), 
        centerRightEyeY(), 
        random(-2, 2), 
        random(-2, 2)));
      particles.add(new Particle(centerLeftEyeX(), 
        centerLeftEyeY(), 
        random(-2, 2), 
        random(-2, 2)));
    }
  } else if (abs(rawArray[75] - rawArray[83]) < 6) {
    // mouth open but eyes closed (screaming): maximum tears
    waterYValue += 2;
    totalTears += 4;
    num = 4;
    for (int i = 0; i < num; i++) {
      particles.add(new Particle(centerRightEyeX(), 
        centerRightEyeY(), 
        random(-6, 6), 
        random(-6, 6)));
      particles.add(new Particle(centerLeftEyeX(), 
        centerLeftEyeY(), 
        random(-6, 6), 
        random(-6, 6)));
    }
  } else {
    // smiling with eyes open: let the water drain
    waterYValue -= 2;
    if (waterYValue < 0) {
      waterYValue = 0;
    }
  }
  if (waterYValue > height) {
    waterYValue = height;
  }

  // Step every particle and remove dead ones via the iterator so deletion
  // during iteration is safe. Generic restored: the published raw
  // `Iterator` made `Particle b = it.next();` an invalid Object-to-
  // Particle assignment (the <Particle> was evidently stripped by the blog).
  Iterator<Particle> it = particles.iterator();
  while (it.hasNext()) {
    Particle b = it.next();
    b.update();
    if (b.isDead()) {
      it.remove();
    }
  }
}
//--------------
//--------------
// Per-frame: draw the tracked face, spawn tears, step the water
// simulation, draw the pooled water, and tint the face once submerged.
void draw() { 
  stroke(204, 225, 226);

  if (rawArray != null) {
    background(228, 241, 242);
    lineFace();

    noStroke();
    //text("Crying Simulator 2.0 : You've cried " + totalTears + " tears", width/2, 10);

    if (found != 0) {
      // NOTE(review): the original re-checked rawArray == null here to
      // print "Make sure FaceOSC is sending raw data." — unreachable
      // inside this rawArray != null branch, so it has been removed.
      drawTears();
      println(waterYValue);
    }

    // step the wave simulation, then draw the water as one filled quad
    // per column from the (displaced) surface down to the window bottom
    update();
    noStroke();
    fill(59, 180, 250);
    for (int i = 0; i < number - 1; i++) {
      float x1 = hw + i * w;
      float y1 = height - waterYValue - value[i];
      float x2 = hw + (i + 1) * w;
      float y2 = height - waterYValue - value[i + 1];
      beginShape(QUAD);
      vertex(x1, height);
      vertex(x1, y1);
      vertex(x2, y2);
      vertex(x2, height);
      endShape(CLOSE);
    }
    // once the water rises past rawArray[39] (a face y-coordinate —
    // presumably near the chin), redraw the face in the water color
    if ((height - waterYValue) < rawArray[39]) {
      stroke(79, 190, 255);
      lineFace();
      stroke(204, 225, 226);
    }
  }
}

//--------------------------------------------
// oscP5.plug callbacks — bound by method name, so names must not change
public void found(int i) {
  // println("found: " + i);
  found = i; // nonzero while a face is being tracked
}
public void rawData(float[] raw) {
  rawArray = raw; // stash data in array
}

//--------------------------------------------
// Arrow keys cycle the highlighted raw point. x coordinates live at even
// indices, so we always step by 2 to stay on an (x,y) pair boundary.
// DOWN switches the sketch mode.
void keyPressed() {
  if (keyCode == RIGHT) {
    highlighted = (highlighted + 2) % rawArray.length;
  }
  if (keyCode == LEFT) {
    highlighted = (highlighted - 2) % rawArray.length;
    if (highlighted < 0) {
      // wrap to the last x-index; the original used length-1, which is
      // an odd (y) index and broke the even-index invariant above
      highlighted = rawArray.length - 2;
    }
  }
  if (keyCode == DOWN) {
    mode = 1;
  }
}
Written by Comments Off on takos-faceosc Posted in FaceOSC

Krawleb-LookingOutwards05

The presentation that I was most impressed by (of those I attended, which was unfortunately not as many as I would have liked) was the presentation by Stefan Welker about Google’s ‘daydream labs’. I appreciated the way they approached their timelines and decided to prototype from the ground up on a weekly basis, valuing diversity of techniques rather than working on refining a larger project. I think that this approach to VR development, an incredibly small team on a quick turnaround, is more honest to the medium which is arguably still in its nascency. Unlike many of the projects that I’ve seen that appear to be little more than ‘immersive’ ports of screen-based interactions, their prototypes focused on testing interactions unique to room-scale VR as a medium, finding successes and failures in both techniques and social contexts. As someone who is as interested in the interaction methods and context of VR as the content of VR, Welker’s role sounds immensely exciting, working broadly to explore new types of interactions for what many (myself included) believe will evolve into an increasingly prominent medium.

He also mentioned that they frequently make blog posts summarizing their findings here, working to build more of a community of best-practices and patternized interactions, which is the sort of early-stage interaction design that VR needs right now.

Antar-lookingoutwards05

The Avocado Experience

avocado

During the Weird Reality show, I had the great pleasure to work the untitled avocado virtual reality experience by Scott Andrew and the Institute for New Feeling. During my five hour shift I got to explore the piece fairly deeply, and had the great opportunity of seeing how so many people interpreted and reacted to the work differently.

The experience began in a warehouse with a few bins of interested objects around the perimeter. If the user picked up an object, it would likely say that the object was out of stock. However, if the user picked up an avocado, the user was transported to a fantasy world. In this world the used embarked on a truck journey, one which the user was much more limited in movement, and if the user let go of the avocado, they would be brought back to the warehouse. Each time the user returned to the fantasy world they were brought back to the beginning of the truck journey. Through out the journey, the user was able to tap on an “add to cart” button with their available hand. While the user had no visual response whether or not they had successfully added the avocado to cart, in the browser on the computer it was possible to see the number of avocados in the cart. After the experience is over, the user was able to purchase the avocados on Amazon. When on the truck journey, the user was driving around a long curve through an avocado farm, and could see Aztec pyramids in the horizon. When driving through the farm, there were billboards throughout with statements about drought, or photos of models. After the user reached the end of the curve, they were able to see the destination of the truck. This destination was a large house with the fourth wall missing, allowing the truck to drive right into the living room. There was a TV on the far wall, with a sports game playing. On either side of the TV was a portrait of Vladimir Putin, and a portrait of a german shepherd. Once at the house the user was able to hear George Bush Jr. and his wife speaking. Once the user had been in the house for a moment, they were transported back to the warehouse.

For some users, the truck journey was long and a bit of a let down. After being transported back to the warehouse, they would take off the Vive and ask, “That’s it?”, while others would laugh in delight. Some users never got to the house because they were having too much fun picking up, throwing, juggling and playing with the avocados. Others, however, rarely picked up an avocado, and enjoyed the challenge of trying to grab things that were out of reach or inaccessible.

At the end of the night (2am), I was able to fully explore the experience for myself, and I took the luxury to read the rather long artist statement that existed above the avocado bin in the virtual warehouse. This is where I got the chance to learn fascinating information such as, “Avocado” is from Nahuatl word “ahuacatl”, which means “testicle”, which helped make the fruit sound more exotic than its original name “Alligator-Pear”. The statement followed the fruit’s rise to popularity through the late 20th century. A lot of people put a lot of time and money in to advertising and branding the avocado. The fruit was supposed to be a symbol for the California dream – the fruit of the healthy and happy. While on the truck journey, the user sees all the billboards that explain key moments in the history of the fruit, including a promotion which included winning a “Mrs Ripe” contest to be on Baywatch. It also included the Tom Selleck scandal. The Bush’s voices are present due to the choking incident in 2002. In essence this long and anticlimactic ride on a truck is a representation of the capitalistic efforts to bring rise to a fruit named after male gentiles.

Antar-FaceOSC

Happy Orbs

illustration_sans_titre

The goal for this week was to become familiar with FaceOSC, but also using P3D. In particular I wanted to learn more about lighting and camera angles. I wanted to have a simple but delightful interaction with some floating orbs that would respond to your face and your energy.

Read the code here.

In terms of what the orbs are responding to and how they are responding, there are a few different criteria.

  • The closer you are to the camera, the more red the orbs glow
  • The further away you are, the more blue the orbs glow
    • Based on how far your left temple is from your right
  • The orbs bounce more excitedly the bigger you smile
    • Based on the distance from the left corner of your mouth to the right
  • The closer your chin is to your chest, the smaller the orbs
    • Based on the distance from your nose to your chin
  • The camera will follow your nose
    • Based on your nose x,y coordinates

Keep your head high, smile, look straight, and stay close and engaged. The following will result in excited, large, red, orbs.

closered farblue

Written by Comments Off on Antar-FaceOSC Posted in FaceOSC

Guodu-FaceOSC

fsdf

Inspired by Text Rain and Typeface2 by Mary Huang. In the process, I became intrigued by painting with type, with one’s face movements.

gif1 Type as a path gif2 Got into rotating type

gif4

gif5

// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//

import oscP5.*;
OscP5 oscP5;
// num faces found
int found;
float[] rawArray; // interleaved x,y landmark coordinates from FaceOSC
//which point is selected
int highlighted;
String word = "HelloWorld"; // the text that falls/paints across the canvas
//int wordIndex = int(random(wordBank.length));
float textX; // randomized on reset (NOTE(review): assigned but the text is drawn at rawArray[100] — verify intent)
float textY; // current fall position of the word
//float speed = 1;
float speed = .01; // fall speed; re-randomized each time the word resets
//float posX = rawArray[0];

//float textSize = random(0 30);
float textSize = 30; // overwritten each frame from rawArray[128] in fallingWords()
float rgbColor = random(255); // NOTE(review): shadowed by a local of the same name in fallingWords()

// window, OSC wiring (plug() binds the callbacks below by method name),
// and a random starting x for the falling word
void setup() {
size(640, 480);
frameRate(30);
oscP5 = new OscP5(this, 8338);
oscP5.plug(this, "found", "/found");
oscP5.plug(this, "rawData", "/raw");

textAlign(CENTER);
textX = random(width);
}
// The background is intentionally never cleared per frame (all the
// background() calls are commented out), so drawn type accumulates —
// this is the "painting with type" effect.
void draw() {
//background(255);
//stroke(0);
//noStroke();
//background(255, 0, 0, 10);
textSize(textSize);

if(found > 0) {
for (int val = 0; val < rawArray.length -1; val+=2){
if (val == highlighted){ fill(255,0,0);}
else{fill(100);}

//ellipse(rawArray[val], rawArray[val+1],8,8);
//text("Use Left and Right arrow keys to cycle through points",20,20);
//text( "current index = [" + highlighted + "," + int(highlighted + 1) + "]", 20, 40);
//println(rawArray[val]);
// NOTE(review): fallingWords() runs once per tracked point (~33x per
// frame), advancing/rotating the word each call — presumably part of
// the visual effect, but confirm it is intentional
fallingWords();
}
}
}
// Advance the falling word and draw it at the face-driven position.
// NOTE(review): rotate() is called without pushMatrix/popMatrix, so the
// rotation accumulates across calls — this appears to produce the
// spiraling type seen in the gifs; confirm before "fixing".
void fallingWords(){

float rgbColor = random(255); // shadows the sketch-level rgbColor
textY = textY+speed;
float x1=map(textY, 0, height, 0, 255); // color shifts as the word falls
fill(rgbColor+x1, 102, 153, 80);
textSize = rawArray[128]/5;  // type size driven by a face coordinate
rotate(rawArray[100]);       // accumulating rotation (see note above)
text(word, rawArray[100], textY);
if (textY > height){
//reset text's falling placement (textX, textY) and speed
// NOTE(review): textX is re-randomized but the word is drawn at
// rawArray[100] above, so textX has no visible effect — verify
textX = random(width);
textY = 0;
//speed = 1;
speed = random(.01,.1);
background(204,204,204); // wipe the canvas when a word completes a fall
}
}
// Arrow keys cycle the highlighted raw point two indices (one x,y pair)
// at a time.
void keyPressed(){
  if (keyCode == RIGHT){
    highlighted = (highlighted + 2) % rawArray.length;
  }
  if (keyCode == LEFT){
    highlighted = (highlighted - 2) % rawArray.length;
    if (highlighted < 0){
      // wrap to the last x-index; the original's length-1 is an odd (y)
      // index and broke the even-index stepping used by RIGHT
      highlighted = rawArray.length - 2;
    }
  }
}
/////////////////////////////////// OSC CALLBACK FUNCTIONS//////////////////////////////////
// bound by method name via oscP5.plug() — names must not change
public void found(int i) {
println("found: " + i);
found = i;
}
public void rawData(float[] raw) {
println("raw data saved to rawArray");
rawArray = raw;

}

 

Zarard-FaceOSC

So my inspiration for this project was The Wizard from the Wizard of Oz. One idea that I really enjoy is the mystery of the floating head. Although it’s really cheesy looking back on it now. I wanted to see if i could create the same feeling of grandness that the special effects in the picture below conveys. To me the grandeur doesn’t come from the fire or the pedestals, it actually comes from the magic conveyed by floating and transparency.

wizardofoz1

Those are the simple things I was most focused on capturing. Something I also played with was different ways to create depth. To create the depth complexity of the face in 2D is really hard however the person who was actually most famous for doing that inspired me, Picasso. With cubism he just broke everything into shapes (not particular accurate placement or shading) however the humanistic aspect was still conveyed. By trying to take aspects from his cubism, I realized that color would play a big part in how much this effect could be conveyed. Monochromatic was true to the one hue attribute of faces so I kept that. But since the polygons looked kind of glassy with the triangles I went for more modern colors. Additionally, I tried to make sure the mask was more dynamic than the normal one-to-one movements such as: if you blink the mask blinks or if you smile the screen turns yellow. So with my computation I tried to make the mask evolve through changing the polygons composing the face but not necessarily in direct response to your movements.

 

A couple of technical notes:

Because the face tracker constantly lost my face in different lighting environments, I have the program set so that the program just pauses when it loses your face.

Additionally the mask uses all of the data points given by the OSC face tracker which is why the mask reflects the face so well.


import oscP5.*;
OscP5 oscP5;
int found; // nonzero while a face is tracked; draw() pauses when it is 0
float[] rawArray; // interleaved x,y landmark coordinates from FaceOSC
int highlighted; //which point is selected
int pickpoint = 0; // scratch index when sampling random landmarks
IntList pickpoints = new IntList(); // flat triples: x-index, y-index, alpha
int numpoints = 300; // slots in pickpoints (100 triples of 3)
 
//--------------------------------------------
void setup() {
 size(640, 480);
 frameRate(30);
 oscP5 = new OscP5(this, 8338);
 oscP5.plug(this, "found", "/found"); // plug() binds the callbacks below by name
 oscP5.plug(this, "rawData", "/raw");
 
}
int time=0; // frame counter; draw() re-samples mask triangles every 3rd frame
//--------------------------------------------
// Draw the cubist mask: face/brow/nose/eye polygons from the raw
// landmarks, plus a slowly-evolving set of translucent triangles between
// randomly sampled landmarks (stored as x-index/y-index/alpha triples in
// pickpoints).
void draw() {
 pushMatrix();
 // enlarge and recenter the whole mask
 scale(1.75);
 translate(-150,-150);
 
 
 //fill(random(0,20),random(200,244),random(140,150),100);
 noStroke();
 
 // when the tracker loses the face (found == 0) nothing is redrawn, so
 // the sketch intentionally pauses on the last frame
 if (found != 0) {
 background(230,230,250);
 // fill in cubist mask
 
 fill(20,30);
 beginShape();
 // jaw outline: raw indices 0..32 step 2 (x of points 0..16)
 for (int edge = 0; edge <= 32; edge +=2){
    vertex(rawArray[edge], rawArray[edge+1]);
 } 
 // close the face shape back across the brow landmarks
 vertex(rawArray[52], rawArray[53]);
 vertex(rawArray[48], rawArray[49]);
 vertex(rawArray[38], rawArray[39]);
 vertex(rawArray[34], rawArray[35]);
 endShape();
 
 // fill in eyebrows
 //strokeWeight(5);
 //strokeJoin(MITER);
 ////strokeCap(SQUARE);
 //stroke(100);
 fill(random(0,50),140);
 beginShape();
 for (int brow = 34; brow < 42; brow +=2){
   vertex(rawArray[brow],rawArray[brow+1]);
 }
 endShape();
 
 beginShape();
 for (int brow = 42; brow < 52; brow +=2){
   if (brow != 42){
     vertex(rawArray[brow],rawArray[brow+1]);
   }
 }
 endShape();
 noStroke();
 //fill in nose
 fill(random(0,50),180);
 beginShape();
 vertex(rawArray[54], rawArray[55]);
 vertex(rawArray[70], rawArray[71]);
 vertex(rawArray[66], rawArray[67]);
 vertex(rawArray[62], rawArray[63]);
 endShape();
 
 //fill in left eyes
 fill(0, random(50,200));
 beginShape();
 for(int eye = 72; eye < 82; eye +=2){
   vertex(rawArray[eye], rawArray[eye+1]);
 } 
 endShape();
 
 //fill in right eyes
 fill(0, random(50,200));
 beginShape();
 for(int eye = 84; eye < 94; eye +=2){
   vertex(rawArray[eye], rawArray[eye+1]);
 } 
 endShape();
 
 // first-time seeding of pickpoints with random (x-index, y-index, alpha)
 // triples. NOTE(review): this relies on Processing's IntList.set()
 // growing the list when the index is past its end — confirm against
 // the IntList documentation.
 if (pickpoints.size() == 0){
   for(int k = 0; k < numpoints; k += 3){
     pickpoint= int(random(rawArray.length));
     float x,y;
     // force an even (x) index; its partner is the following odd (y) index
     if (pickpoint%2 == 1){
       x = pickpoint -1; 
       y = pickpoint;
     } else {
       x = pickpoint; 
       y = pickpoint + 1;
     }
   pickpoints.set(k,int(x));
   pickpoints.set(k+1,int(y));
   pickpoints.set(k+2, int(random(100)));
   }
 }
 // NOTE(review): this loop only sets fill colors and draws nothing —
 // it looks like leftover template code for drawing the raw points
 for (int val = 0; val < rawArray.length -1; val+=2) {
   if (val == highlighted) { 
     fill(255, 0, 0);
   } else {
     fill(100, random(255));
   }

 }
 // every 3rd frame, re-sample roughly 1/8 of the triples at random so the
 // mask evolves on its own, not in direct response to face movement
 if (time % 3 == 0){ 
 for(int k = 0; k < numpoints; k += 3){
   if( int(random(0,8)) == 0){
     pickpoint= int(random(rawArray.length));
     float x,y;
     if (pickpoint%2 == 1){
       x = pickpoint -1; 
       y = pickpoint;
     } else {
       x = pickpoint; 
       y = pickpoint + 1;
     }
   pickpoints.set(k,int(x));
   pickpoints.set(k+1,int(y));
   pickpoints.set(k+2, int(random(100)));
 }
 }
 time = 0;
 print(pickpoints);
 }
 //pickpoints: x, y, alpha
 noStroke();
 
 // translucent triangles between successive sampled landmarks
 for (int i = 0; i+7 < pickpoints.size(); i+=9){
   if(pickpoints.size() != 0){
   //make triangles by hopping every 9 points?
     fill(0,pickpoints.get(i+2)); 
     beginShape();
     vertex(rawArray[pickpoints.get(i)],rawArray[pickpoints.get(i+1)]);
     vertex(rawArray[pickpoints.get(i+3)],rawArray[pickpoints.get(i+1+3)]);
     vertex(rawArray[pickpoints.get(i+3+3)],rawArray[pickpoints.get(i+1+3+3)]);
     endShape();
   }
 }
 
 }
 time += 1;
 popMatrix();
}
 
//--------------------------------------------
public void found(int i) {
 // plugged to /found by name; nonzero while a face is tracked
 found = i;
}
public void rawData(float[] raw) {
 rawArray = raw; // stash data in array
}

 

 

Written by Comments Off on Zarard-FaceOSC Posted in FaceOSC

Catlu – LookingOutwards05

Jeremy Bailey – Preterna (AR Pregnancy)

jeremy-bailey

One of the projects I found really interesting at the VR salon during Weird Reality was Jeremy Bailey’s pregnancy simulator, Preterna. When I put the VR headset on, I was transported to a calm plain of grasses and wildflowers. As I looked down, I saw the body of a pregnant woman. I thought the premise and execution of this project was really smart. By placing the mesh of a pregnant woman at a certain place and having us stand at that same place, it really did feel natural to look down and see a body that could be ours. I appreciated how we could see funky versions of our arms and hands without having to hold a remote or controller. It made the feeling of holding my hands to my “pregnant belly” more real. Although I couldn’t actually feel the belly, I did get an odd sense of happiness and contentment, probably because of general associations with motherhood and happiness, and also because of the calm environment. Everyone has wished to step into a body of someone of the opposite gender, and I think this is a great way for men to see at least a little bit what it’s like being pregnant. I thought it was very smart and thought provoking.

Catlu – FaceOSC

FaceOSC Project Video:

GIF:

movement-of-air

I couldn’t get the WP-Syntax Plugin to work correctly for my code, so here is a link to the code on Github where it looks decently nice:
https://github.com/catlu4416/60-212/blob/master/Face_thing.pde

20161014_04242720161014_042449

 

For this project I began with a loss of what to do. At first I did some research into importing 3D models into Processing, but realized quickly that I did not have the time to figure that out. I thought about making a game, but felt weird about controlling it with my face. Personally, I find moving parts of my face like my eyebrows very hard and awkward. In the end I decided to do a small devilish face that hid behind energy particles. When the devil face opens its mouth, the particles gather, and when they’ve collected, it shoots the particles back at the screen. Afterwards, the particles return to how they were before, albeit closer to the devil face’s mouth. If you don’t let the particles gather long enough and don’t keep your mouth big enough, the particles will slowly move back to their positions. I feel alright about this project, but not super. I didn’t have a very intriguing inspiration for this particular project. Although I think the end result is fun, I didn’t get to do as much with it as I wanted. It definitely took me a lot longer to make the particles gather, disperse, and follow certain steps, than I thought it would. I added a few small nuances like randomized speed of the particles and size changing, but I feel like there could have been more attention to making it really shine.

Written by Comments Off on Catlu – FaceOSC Posted in FaceOSC

Krawleb-FaceOSC

My project is a Skill-crane that you control with your face. I call it SkillCranium.

It uses the face position (specifically, between the eyes, to control the position of a skill crane. Opening your mouth opens the crane claw, and closing it closes the claw. The objective of the game is to get a box into the container on the left of the screen.

It’s remarkably hard. And it forces people playing to make a lot of silly faces.

I was interested in making something non-face-like at all, exploring the awkwardness of  face as an interaction method, and forcing unnatural expressions as a gesture. The result was entertainingly uncomfortable, a system that requires remarkable patience and attention to head position in a way we aren’t accustomed to.

Additionally, this project was a pretty steep technical learning curve for me, as I had never implemented any sort of physics engine other than some simple hand-coded forces, and wrapping my head around Box2D, joints, and the World Coordinate system was quite a challenge.

Ultimately, I would have liked to explore the fidelity of the interaction more, making the ‘game’ elements more compelling, working on colors, graphics, and overall visual polish a bit more, but was slowed down far more than I expected with the Physics.

Because my code uses many different classes, in separate tabs, I’ve pasted just the content of the primary tab, and the classes are on github: https://github.com/shelf-death/60212-Interactivity-and-Computation/tree/master/Assignments/5_Face/krawleb_FaceOSC

 

import shiffman.box2d.*;
import org.jbox2d.common.*;
import org.jbox2d.dynamics.joints.*;
import org.jbox2d.collision.shapes.*;
import org.jbox2d.collision.shapes.Shape;
import org.jbox2d.common.*;
import org.jbox2d.dynamics.*;
import org.jbox2d.dynamics.contacts.*;
import oscP5.*;

// A reference to our box2d world
Box2DProcessing box2d;

// NOTE(review): the generic parameters of these lists were evidently
// stripped when the code was published (the blog ate the <...>); the
// element types below are restored from how draw() iterates each list
// (for (Boundary wall : boundaries), for (Box b : boxes)), which does
// not compile against raw lists.
// A list we'll use to track fixed objects
ArrayList<Boundary> boundaries;
// A list for all of our rectangles
ArrayList<Box> boxes;
//A list of claw arms units (element class lives in a separate tab —
//left raw here; TODO confirm the claw class name and parameterize)
ArrayList claws;

//Initialize OSC object
OscP5 oscP5;

//declare new crane
Crane crane;

// face boolean and raw data array
int found;        // nonzero while FaceOSC is tracking a face
float[] rawArray; // interleaved x,y landmarks (mirrored/scaled in rawData)

//which point is selected
int highlighted;

//Mouth open boolean
boolean mouthOpen;
float jawThreshhold = 0.5; // mouth counts as open past this height/width ratio

//Did you win
boolean youWin = false;
PFont proxima;


////////////////////////////////////////////////////////////////////
// window, OSC wiring, Box2D world, static boundaries (floor + the
// collection bin the boxes must be dropped into), the crane, and the font
void setup() {
  size(960, 720);
  frameRate(60);

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found"); // plug() binds callbacks by method name
  oscP5.plug(this, "rawData", "/raw");
  
  // Initialize box2d physics and create the world
  box2d = new Box2DProcessing(this);
  box2d.createWorld();
  // We are setting a custom gravity
  box2d.setGravity(0, -30);
  
  //Create Arraylists 
  boxes = new ArrayList();
  boundaries = new ArrayList();
  claws = new ArrayList();
  
  // Add a bunch of fixed boundaries
  boundaries.add(new Boundary(width/2,height-10,width-20,10)); // floor
  
  //add a collection box
  boundaries.add(new Boundary(width/8-45,height-180,10,50)); //left
  boundaries.add(new Boundary(width/8-5,height-150,90,10)); //bottom
  boundaries.add(new Boundary(width/8 + 35,height-180,10,50)); //right
  
  //create our crane object
  crane = new Crane();
  
  //loadfont
  proxima = createFont("data/ProximaNova-Black.otf",150);
  textFont(proxima);
}//end setup

///////////////////////////////////DRAW/////////////////////////////////
void draw() {  
  background(255);
  noStroke();
  
  //Box2D Step
  box2d.step();
  
  //Check mouth
  isMouthOpen();
  
  
  //Draw Objects
  if (rawArray!=null) {
    
    if ( found > 0 ) {
    for (int val = 0; val < rawArray.length -1; val+=2){ //if (val == highlighted){ fill(255,0,0);} //else{fill(100);} fill(240); noStroke(); ellipse(rawArray[val], rawArray[val+1],10,10); }//end points array loop //GUI AND DEBUGGING //debugging(); }//end face found check crane.update(); crane.drawCrane(); }//end rawArray length check // Display all the boundaries for (Boundary wall: boundaries) { wall.display(); }//end boundary draw // Display all the boxes for (Box b: boxes) { b.display(); didYouWin(b); }//end box draw // Boxes that leave the screen, we delete them // (note they have to be deleted from both the box2d world and our list for (int i = boxes.size()-1; i >= 0; i--) {
    Box b = boxes.get(i);
    if (b.done()) {
      boxes.remove(i);
    }
  }
  
  //Display the cord
  crane.cord.display();
  
  // Display all the claws
  for (int i = 0; i < claws.size(); i++) { claws.get(i).display(); }//end claw draw if (youWin == true){ textAlign(CENTER); textSize(100); text("YOU WIN!",width/2,height/2); } }//end Draw ///////////////////////////////Crane Class////////////////////////////////// class Crane { Float glideSpeed = 0.02; PVector pulley,cross,base; Float baseWidth = width*0.05; Float baseHeight = height*0.9; Float crossWidth = width*0.875; Float crossHeight = height*0.04; Float pulleyWidth = 50.0; Float pulleyHeight = 50.0; Bridge cord; Crane(){ pulley = new PVector (width/2,height/2); cross = new PVector (width*0.1,height*0.5); base = new PVector(width*0.9,height*0.1); //length , number , anchorX, anchorY cord = new Bridge(width/5,width/40,pulley.x,pulley.y); } //update method void update(){ //update crossbar with top of nose Y value cross.y = cross.y - (cross.y - rawArray[55])*glideSpeed; //update pulley with top of nose X value pulley.x = (pulley.x - (pulley.x - rawArray[54])*glideSpeed); //update pulley Y with same as crossbar pulley.y = cross.y; //update the cord position Vec2 pulleyWorld = new Vec2(box2d.coordPixelsToWorld(pulley)); Vec2 anchorVec = cord.particles.get(0).body.getPosition(); cord.particles.get(0).body.setLinearVelocity(new Vec2( (pulleyWorld.x-anchorVec.x)*8, (pulleyWorld.y-anchorVec.y)*8)); } //drawCrane method void drawCrane(){ //stroke(0); noStroke(); fill(0); rectMode(CORNER); //Base rect(base.x,base.y,baseWidth,baseHeight); //Crossbar rect(cross.x,cross.y,crossWidth,crossHeight); //Pulley rectMode(CENTER); rect(pulley.x,pulley.y,pulleyWidth,pulleyHeight); //Claws drawn in draw loop } }//end crane class //////////////////////////////Did You Win///////////////////////////// void didYouWin(Box box) { Vec2 boxPos = new Vec2 (box2d.getBodyPixelCoord(box.body)); if ( boxPos.x > width/8-35 && boxPos.x < width/8 + 35 && boxPos.y > height-180 && boxPos.y < height - 130){ youWin = true; } } //////////////////////////////Create New Box///////////////////////////// void 
newBoxes() { Box p = new Box(crane.pulley.x,height-40); boxes.add(p); }//end new boxes function //////////////////////////////Is Mouth Open///////////////////////////// void isMouthOpen() { if (dist(rawArray[102],rawArray[103],rawArray[114],rawArray[115]) > dist(rawArray[108],rawArray[109],rawArray[96],rawArray[97])*jawThreshhold){
  mouthOpen = true;
  crane.cord.toggleClaw(mouthOpen);
  //rect(0,200,10,10);
  }
  else{mouthOpen = false; crane.cord.toggleClaw(mouthOpen);}
}

///////////////////////////////Debugging//////////////////////////////////
// Draws a small white panel in the top-left corner showing which pair of
// raw FaceOSC point indices [x, y] is currently highlighted.
void debugging() {
  fill(255);
  rect(0, 0, 160, 30);
  fill(0);
  text( "current index = [" + highlighted + "," + int(highlighted + 1) + "]", 10, 20);
}//end debugging function

///////////////////////////////////////MousePressed///////////////
// Mouse input is currently unused; claw toggling happens in isMouthOpen().
void mousePressed() {
  //crane.cord.toggleClaw();
}
///////////////////////////////KeyPressed//////////////////////////////////
// RIGHT/LEFT step through the raw face points two at a time (the points
// are stored as interleaved x,y pairs); UP drops a new box at the pulley.
void keyPressed(){
  if (keyCode == RIGHT){
    highlighted = (highlighted + 2) % rawArray.length;
  } //end right key
  if (keyCode == LEFT){
    highlighted = highlighted - 2;
    if (highlighted < 0){
      // FIX: wrap to the last (x,y) pair. The original wrapped to
      // length-1, landing on an odd index and breaking the even/odd
      // [x,y] pairing displayed by debugging().
      highlighted = rawArray.length - 2;
    }//end highlighted if
  }//end left key
  if (keyCode == UP){
    newBoxes();
  }
}//end keypressed
////////////////////////////// OSC CALLBACK FUNCTIONS//////////////////////////////
// Invoked by oscP5 for "/found"; stores how many faces FaceOSC sees.
public void found(int i) {
  println("found: " + i);
  found = i;
}//end found
// Invoked by oscP5 for "/raw": keeps the raw face points, scales them up
// by 1.5x, and mirrors the x coordinates (even indices) so the sketch
// behaves like a mirror.
public void rawData(float[] raw) {
  println("raw data saved to rawArray");
  rawArray = raw;
  for (int i = 0; i < rawArray.length; i++) {
    rawArray[i] *= 1.5;
    if (i % 2 == 0) {
      rawArray[i] = map(rawArray[i], 0, width, width, 0);
    }
  }//end for loop
}//end rawData

There are quite a few hacky magic numbers in here. Sorry coding style gods.

Written by Comments Off on Krawleb-FaceOSC Posted in FaceOSC

Lumar-LookingOutwards05

A lot of my ideation and thinking process for our FaceOSC project was affected by/inspired by what I saw (or didn’t see) at weird reality’s VR salon. I’ll admit, I was a tad disappointed in some of the works – it felt as if the medium wasn’t being used to its fullest extent; it’s a complete 3d immersive environment, but some were such…passive viewing experiences that it was hard to say at all if the content was augmented by its medium or if the content would’ve been equally fine as 2d or regular 3d movies. I wanted to be surprised within a modular environment – I wanted to turn around and see something new – if it was a building,…I wanted to know what made the VR experience better than simply walking through the actual building.

That being said, all the works were still wonderful to see! I am very grateful to have gotten the chance to go!

I got unity and tried some photogramming of my own this week!

remake

But anyway! Mars Wong. Dang. He’s only a freshman! I still haven’t gotten over the fact he started on the VR/Game design scene as only a 9 year old, and later on with a Fjord internship as a 9th grader. What on earth was I doing at that age? …typing very slowly that’s what.

http://m4r5w0n6.com/games#/interrogation/

This work I find especially interesting because after fiddling around with photogramming and unity, this piece doesn’t seem particularly arduous to do – so why do I think it’s worth mentioning? It’s a one day project to recreate an interrogation room environment/feel. With fairly simple techniques, but a really clever usage of the tools available, the deliverable definitely achieves its objectives. I bring this up, because there’s a difference between what can be done and what should be done. Some of the pieces in the VR salon felt incredibly computationally complex, but that complexity did not always translate proportionally to a more developed interaction or benefit artistically.

I liken it to traditional artists that create hyper photorealistic portraits. In that, the extra effort put in the technical execution of the portrait doesn’t generate net benefit to the piece as art – really, why bother with photorealism when a camera is so much faster? In this, I think hyperrealism is like using technology just because one ‘can’ and it is left unconsidered.

The pregnancy vr piece was my favorite. There’s the aspect of the unexpected and unnerving in it that really uses the VR medium well to achieve the effect. I wish I had gotten a chance to experience Mars’ archery game; the full body immersion/pieces wherein the user could be an active participant within the environment were always the best;

http://m4r5w0n6.com/games#/archery/

Xastol – LookingOutwards05

Among my favorite projects was the PoopVR, created by Laura Juo-Hsin Chen. The project uses a Google Cardboard, a phone, and a seat (toilets). Using the Google Cardboard, users use their own phone to enter an online VR world she has created. The VR world is rather lighthearted, with encouraging “poops” and psychedelic patterns, and serves as “motivation” when the user finds them-self in a rather “congested” situation. Additionally, the work allows other individuals partaking in this daily-task to connect with one another and, as a result, encourage each other. Personally, I’ve enjoyed the process of defecation a lot more with her project.

In terms of her approach to work, I appreciate Laura’s use of low-tech, open-source technologies to create charming work that attracts all audiences. The user doesn’t have to mentally prepare themselves to invest in her work because her playful style handles that already.

Website: http://www.jhclaura.com/

Keali-LookingOutwards05

Created by Milica Zec and Winslow Turner Porter III, the project Giant is a virtual reality experience detailing a story of a family amidst an active war zone; inspired by true events of Zec’s family during a war-torn Europe, the vision is of two parents struggling to distract their daughter by inventing a fantastical tale–that the belligerence and commotions above ground are mere antics of a giant. The audience is transported into a makeshift basement shelter in which the characters hide, becoming fully immersed in a dark and ominous atmosphere, complete with sound effects and physical motion as if one were living vicariously through someone in that virtual reality.
Being someone who has had minimal exposure and personal experience with VR, donning the Giant‘s headgear and noise-cancelling headphones was an indescribable and very intimate experience. Giant was impressive from both an artistic and technical viewpoint, boasting emotional storytelling expertise and seamless technological execution with heavy attention to detail. This work is the first VR I’ve experienced to have a fully-immersive, 360-degree view of its fictional realm; it was very invigorating, yet it also made me wary, that I could fully turn my head to view the full surroundings of a virtual room whilst within the piece: in this case, I could omnisciently scan the basement in which the family resided.
Giant was a subtle, powerful experience, and explored a concept similarly demonstrated by the film Life is Beautiful: masking darker truths with lighthearted fantasies for the sake of the innocent. It’s an entirely bittersweet intention, especially when one is seeing it from a third-party point of view.

//giant_website

Xastol – FaceOSC

faceosc_xastol_sketch

For the FaceOSC project, I decided to grow off of my plotting project (./xastol/09/29/xastol-plot/). I decided to develop the characters I generated in the plotting project as “wear-able” identities.

faceosc_xastol_gif

Every face is randomly generated and changes when the user presses the UP key. In terms of the characters in relation to the user’s face, they basically follow all movements made by the head (rotation and translation along all axes: x, y, z). Additionally, the mouth moves in relation to the user’s mouth (height and width) and the eyes change size based on eyebrow movement: this was initially going to be in relation to the actual eye-openness of the user, however, I noticed I got a better effect while tracking the eyebrow position.

Random Face Generation Demo

 

Random Face Generation (Sound of Silence Performance)

My main goal for this project was to expand upon previous work and find new/interesting ways of presenting a concept. I felt this project was important and realizing these new ideas. In the overall scheme of things, I think I achieved my goal fairly well. However, I’m not sure if I did well in terms of maintaining the originality of the initial concept (from the plotting project). I was having a hard time deciding to strictly maintain the initial concept or use it as a catalyst and then shoot for the development of an entirely new way of presenting the initial idea. In the end, I came up with a project that is still very close to the initial idea (i.e. – generative faces, face shapes, sizes, etc.) but also has some detail changes (i.e. – new colors,  slight differences in shape movement, etc.).

 

CODE

//
// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//

//Xavier Apostol
//Generative Faces: Plotter Project Concept

import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

float sz = 1;
float spacing = 100;
float genSz = spacing/4;
float fcOff = genSz/2;

//Initialization of Colors
float R = random(255);
float G = random(255);
float B = random(255);

//Initialization of Head
float rotInt = 15;
float hdX = cos(sz) + random(genSz, 3*genSz);
float hdY = sin(sz) + random(genSz, 3*genSz);
float rotAngle = random(-rotInt,rotInt);

//Initialization of Eyes
float lEyeX1 = sin(sz*0) + random(genSz);
float lEyeY1 = cos(sz*0) + random(genSz);
float rEyeX1 = sin(sz*0) + random(genSz);
float rEyeY1 = cos(sz*0) + random(genSz);
float lEyeX2 = sin(sz*1) + random(genSz);
float lEyeY2 = cos(sz*1) + random(genSz);
float rEyeX2 = sin(sz*1) + random(genSz);
float rEyeY2 = cos(sz*1) + random(genSz);
float ranREye = random(7, 9);
float ranLEye = random(7, 9);

//Initialization of Mouth
float mthX = cos(sz) + random(genSz);
float mthY = sin(sz) + random(genSz);
float ranM = random(-0.1, 1.5);

//Initialization of Spine
float hdOffset = hdY/1.5;
float spineSz = random(genSz/2);
float spXOff1 = random(-8, 8);
float spYOff1 = hdOffset + random(genSz/3);
float spXOff2 = random(-8, 8)+spXOff1;
float spYOff2 = random(genSz/3)+spYOff1;
float spXOff3 = random(-8, 8)+spXOff2;
float spYOff3 = random(genSz/3)+spYOff2;
float spXOff4 = random(-8, 8)+spXOff3;
float spYOff4 = random(genSz/3)+spYOff3;
float spXOff5 = random(-8, 8)+spXOff4;
float spYOff5 = random(genSz/3)+spYOff4;

// Processing entry point: creates the canvas and binds each FaceOSC OSC
// address to the correspondingly named callback method below.
void setup() {
  size(800, 600, OPENGL);
  frameRate(30);

  // FaceOSC broadcasts on UDP port 8338 by default.
  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

// UP key: re-rolls every random parameter so an entirely new character is
// generated. The assignments mirror the global initializers at the top of
// the sketch.
// FIX: the original ended with an explicit draw() call. Processing invokes
// draw() continuously (frameRate(30) set in setup), so event handlers must
// not call it by hand; the new values are picked up on the next frame.
void keyPressed() {
  if (key == CODED) {
    if (keyCode == UP) {
      //Create an entirely new character.

      //For Eyes
      lEyeX1 = sin(sz*0) + random(genSz);
      lEyeY1 = cos(sz*0) + random(genSz);
      rEyeX1 = sin(sz*0) + random(genSz);
      rEyeY1 = cos(sz*0) + random(genSz);
      lEyeX2 = sin(sz*1) + random(genSz);
      lEyeY2 = cos(sz*1) + random(genSz);
      rEyeX2 = sin(sz*1) + random(genSz);
      rEyeY2 = cos(sz*1) + random(genSz);
      ranREye = random(7, 9);
      ranLEye = random(7, 9);

      //For Mouth
      mthX = cos(sz) + random(genSz);
      mthY = sin(sz) + random(genSz);
      ranM = random(-0.1, 1.5);

      //For Spine (each bubble offset chains off the previous one)
      spineSz = random(genSz/2);
      spXOff1 = random(-8, 8);
      spYOff1 = hdOffset + random(genSz/3);
      spXOff2 = random(-8, 8) + spXOff1;
      spYOff2 = random(genSz/3) + spYOff1;
      spXOff3 = random(-8, 8) + spXOff2;
      spYOff3 = random(genSz/3) + spYOff2;
      spXOff4 = random(-8, 8) + spXOff3;
      spYOff4 = random(genSz/3) + spYOff3;
      spXOff5 = random(-8, 8) + spXOff4;
      spYOff5 = random(genSz/3) + spYOff4;

      //For Head
      hdX = cos(sz) + random(genSz, 3*genSz);
      hdY = sin(sz) + random(genSz, 3*genSz);
      rotAngle = random(-rotInt, rotInt);

      //For Colors
      R = random(255);
      G = random(255);
      B = random(255);
    }
  }
}

// Renders the generated character driven by the live FaceOSC pose and
// gesture values. The head is drawn inside pushMatrix/popMatrix so its
// rotations don't affect the face; the face/body then apply their own
// translate+scale (the matrix resets automatically each frame).
void draw() {
  background(0);
  strokeWeight(1);
  noFill();

  if(found != 0) {
    pushMatrix();
    translate(posePosition.x, posePosition.y);
    //Scales head and allows for rotations
    scale(poseScale*2);
    rotateY(0 - poseOrientation.y);
    rotateX(0 - poseOrientation.x);
    rotateZ(poseOrientation.z);
    rotate(radians(rotAngle));
    ellipse(0,0, hdX,hdY);
    popMatrix();

    //FACE
    translate(posePosition.x, posePosition.y);
    scale(poseScale);
    noFill();

      //Eyes — eyebrow height inflates eye size (tracked more reliably
      // than eye openness, per the write-up above).
    float eyeFac = 1;
    float eyeBL = eyebrowLeft * 2;
    float eyeBR = eyebrowRight * 2;
    ellipse(-20,eyeLeft * -ranLEye, lEyeX1*eyeFac + eyeBL,lEyeY1*eyeFac + eyeBL);
    ellipse(20,eyeRight * -ranREye, rEyeX1*eyeFac + eyeBR,rEyeY1*eyeFac + eyeBR);
    ellipse(-20,eyeLeft * -ranLEye, lEyeX2*eyeFac + eyeBL,lEyeY2*eyeFac + eyeBL);
    ellipse(20,eyeRight * -ranREye, rEyeX2*eyeFac + eyeBR,rEyeY2*eyeFac + eyeBR);

      //Mouth — scales with the user's tracked mouth width/height.
    ellipse(0, 20*ranM, mouthWidth* mthX/3, mouthHeight * mthY);

        //BODY/BUBBLES — chain of colored circles below the head.
    stroke(R,G,B);
    ellipse(spXOff1,spYOff1, spineSz,spineSz);
    ellipse(spXOff2,spYOff2, spineSz,spineSz);
    ellipse(spXOff3,spYOff3, spineSz,spineSz);
    ellipse(spXOff4,spYOff4, spineSz,spineSz);
    ellipse(spXOff5,spYOff5, spineSz,spineSz);

  }
}

// OSC CALLBACK FUNCTIONS
// Each method below is bound to a FaceOSC address in setup() via
// oscP5.plug(); oscP5 calls them by name when a matching message arrives.
// They simply log and stash the incoming value in the globals above.

public void found(int i) {
  println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
  println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  if(m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}
Written by Comments Off on Xastol – FaceOSC Posted in FaceOSC

Jaqaur – FaceOSC

For my Face OSC Project, I made a 2-D baby that reacts to the user’s face rather than directly being puppeteered by it. When it doesn’t see a face, it will become scared. When it sees a face, it is neutral/happy, and when the user makes a “funny face” (a.k.a. raised eyebrows and/or an open mouth), it will smile and laugh. Its eyes also follow the user from side to side. It’s pretty simplistic, and if I had more time with it, I would have liked to add more ways to interact with the baby (you can see some of these commented out in my code below). Still, I think it’s cute and it’s fun to play with for a few minutes.

From the start, I knew I didn’t want to make a direct puppet, but rather something that the user could interact with in some way. I also wanted to make the design in 3-D, but I had a lot of trouble figuring the 3-D version of Processing out, and I didn’t think I had enough time to devote to this project to both learn 3-D Processing and make something half decent. So, I settled for 2D. My first idea was that the user could be a sort of king or queen looking out over an army of tiny people. Different commands could be given to the army via facial expressions, and that could cause them to do different things. While I still like this idea in theory, I am not very good at animation, and didn’t know how to get 100 tiny people to move and interact naturally. My next idea, and one that I actually made, was “Bad Blocks,” a program in which the user acts as the baby-sitter for some randomly-generated block people. When the user isn’t looking (a.k.a. when no face is found), the blocks run around, but when the user looks, they freeze and their facial expressions change. The user can also open his/her mouth to send the blocks back into their proper place.

screen-shot-2016-10-13-at-10-44-54-pm

This program worked okay, but the interactions didn’t feel very natural, and the blocks were pretty simplistic. Also, FaceOSC sometimes blinks out when the user’s face is moving quickly, contorted strangely, or poorly lit. My block program did not respond well to this, as the blocks abruptly change facial expression and start running around the second a face went away. It looks jumpy and awkward, and I decided to start over with similar interactions, but one single character that would be more detailed and have more smooth facial transitions.

image-1

That’s when I came up with the giant baby head. It seemed fairly easy to make out of geometric shapes (and it was), and it could use similar interactions to the blocks, since both have baby-sitting premises. It was important to me that the baby didn’t just jump between its three facial expressions, because that doesn’t look natural. So, I made the baby’s features be based on a float variable called “happiness” that is changed by various Face OSC input. I made sure that all of the transitions were smooth, and I am pretty proud of how that aspect of this turned out. All in all, I am content with this project. It fulfills my initial expectations for it, but I know it’s not as unique or exciting as it could be.

Here is a link to the code on Github. The code is also below:

//
// FaceOSC Baby written by Jacqueline Fashimpaur
// October 2016
//
// Based on a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//
import oscP5.*;
OscP5 oscP5;

int[] colors;// = new int[6]; 
/*
colors[0] = 0xfeebe2;
 colors[1] = 0xfcc5c0;
 colors[2] = 0xfa9fb5;
 colors[3] = 0xf768a1;
 colors[4] = 0xc51b8a;
 colors[5] = 0x7a0177;
 */

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;
int skin_color_index;
int eye_color_index;
int gender_index;
float happiness;
float eye_displacement = 0;
boolean eye_right;
float baby_center_x;
float baby_center_y;

// Processing entry point: canvas, one-time random character traits, and
// the FaceOSC OSC bindings.
void setup() {
  size(640, 640);
  frameRate(30);
  //sets all colors
  /*colors = new int[6]; 
  colors[0] = #feebe2;
  colors[1] = #fcc5c0;
  colors[2] = #fa9fb5;
  colors[3] = #f768a1;
  colors[4] = #c51b8a;
  colors[5] = #7a0177;*/
  // Randomize the baby's appearance once per run; indices outside the
  // explicit cases in display() fall back to default colors.
  skin_color_index = int(random(0,4));
  eye_color_index = int(random(0,4));
  gender_index = int(random(0,2));
  happiness = 0;
  eye_displacement = 0;
  eye_right = true;
  baby_center_x = 320;
  baby_center_y = 320;
  // FaceOSC broadcasts on UDP port 8338 by default.
  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

// Draws the whole baby (body, head, eyes, nose, mouth, cheeks, eyelids,
// hair) centered at (baby_center_x, baby_center_y). Facial features morph
// continuously with the global `happiness` value so expressions blend
// rather than jump; the pupils track the viewer's head x position.
void display() {
  // Pupil offset follows the tracked head position (divided down + bias).
  eye_displacement = (((70+baby_center_x)-posePosition.x)/25)+2;
  /*if (watched()){
    eye_displacement = ((250-posePosition.x)/25)+2;
  } else {
    if (eye_right){
      eye_displacement += 1;
    }
    else {
      eye_displacement -= 1;
    }
    if (eye_displacement<-15) {eye_displacement = -15; eye_right = true;}
    if (eye_displacement>15) {eye_displacement = 15; eye_right = false;}
  }*/
  // Fallback palette, used when the random indices hit no case below.
  int skin_r = 141;
  int skin_g = 85;
  int skin_b = 36;
  int eye_r = 00;
  int eye_g = 128;
  int eye_b = 192;
  int clothing_r = 255;
  int clothing_g = 187;
  int clothing_b = 218;
  if (skin_color_index == 0) {
    skin_r = 255;
    skin_g = 224;
    skin_b = 186;
  } else if (skin_color_index == 1) {
    skin_r = 241;
    skin_g = 194;
    skin_b = 125;
  } else if (skin_color_index == 2) {
    skin_r = 198;
    skin_g = 134;
    skin_b = 66;
  }
  if (eye_color_index == 0) {
    eye_r = 0;
    eye_g = 192;
    eye_b = 255;
  } else if (eye_color_index == 1) {
    eye_r = 0;
    eye_g = 192;
    eye_b = 0;
  } else if (eye_color_index == 2) {
    eye_r = 83;
    eye_g = 61;
    eye_b = 53;
  }
  if (gender_index == 1){
    clothing_r = 168;
    clothing_g = 204;
    clothing_b = 232;
  }
  //draw the body
  fill(clothing_r, clothing_g, clothing_b);
  noStroke();
  ellipse(baby_center_x, (210+baby_center_y), 500, 200);
  rect(baby_center_x-(500/2), (210+baby_center_y), 500, 300);
  //draw the face: two overlapping ellipses joined by a filled quad
  fill(skin_r, skin_g, skin_b);
  ellipse(baby_center_x,baby_center_y-40, 350, 350);
  ellipse(baby_center_x,baby_center_y+60, 300, 220);
  beginShape();
  vertex(baby_center_x-(350/2), baby_center_y-40);
  vertex(baby_center_x-(300/2), baby_center_y+60);
  vertex(baby_center_x+(300/2), baby_center_y+60);
  vertex(baby_center_x+(350/2), baby_center_y-40);
  endShape(CLOSE);
  //draw the eyes: whites, then iris and pupil shifted by eye_displacement
  fill(#eeeeee);
  ellipse(baby_center_x - 60, baby_center_y - 40, 80, 80);
  ellipse(baby_center_x + 60, baby_center_y - 40, 80, 80);
  fill(eye_r, eye_g, eye_b);
  ellipse(baby_center_x-65+eye_displacement, baby_center_y -40, 50, 50);
  ellipse(baby_center_x+55+eye_displacement, baby_center_y -40, 50, 50);
  fill(0);
  ellipse(baby_center_x-65+eye_displacement, baby_center_y -40, 25, 25);
  ellipse(baby_center_x+55+eye_displacement, baby_center_y -40, 25, 25);
  //draw the nose: a small arc in a slightly darker skin tone
  noFill();
  strokeCap(ROUND);
  stroke(skin_r - 20, skin_g - 20, skin_b - 20);
  strokeWeight(3);
  arc(baby_center_x, baby_center_y + 20, 50, 30, 0, PI, OPEN);
  //draw the mouth: three regimes — sad (<0), content (0..40), laughing (>40)
  strokeWeight(10);
  if (skin_color_index == 0) stroke(#ffcccc);
  if (happiness<0){
    //unhappy — open wailing mouth that grows with negative happiness
    fill(#cc6666);
    ellipse(baby_center_x, baby_center_y+80, 60-(happiness/8), 0-happiness);
  } else if (happiness<=40){
    //happy — a smile arc that widens and lifts with happiness
    noFill();
    arc(baby_center_x, baby_center_y+80-(happiness/5), 60+(happiness/4), happiness/2, 0, PI, OPEN);
  } else {
    //laughing — open mouth: a red arc with a skin-colored upper lip overlay
    strokeWeight(8);
    fill(#cc6666);
    arc(baby_center_x, baby_center_y+81-(happiness/5), 60+(happiness/4), happiness-20, 0, PI, OPEN);
    fill(skin_r, skin_g, skin_b);
    arc(baby_center_x, baby_center_y+79-(happiness/5), 60+(happiness/4), 20+((happiness-40)/10), 0, PI, OPEN);
  }
  //draw the cheeks (range 340-380) — raised only when quite happy
  noStroke();
  fill(skin_r, skin_g, skin_b);
  if (happiness>30){
    ellipse(baby_center_x-90, baby_center_y+60-(happiness/2), 100, 70);
    ellipse(baby_center_x+90, baby_center_y+60-(happiness/2), 100, 70);
  }
  //draw the eyelids (range 200-240) — droop over the eyes when unhappy
  if (happiness<0){
    ellipse(baby_center_x-90, baby_center_y-120-(happiness/3), 100, 90);
    ellipse(baby_center_x+90, baby_center_y-120-(happiness/3), 100, 90);
  }
  //draw a hair
  stroke(0);
  noFill();
  strokeWeight(2);
  curve(400,10,baby_center_x,baby_center_y-200,baby_center_x-20,baby_center_y-270,0,0);
  //draw a bow? If time...
  /* fill(clothing_r, clothing_g, clothing_b);
  noStroke();
  ellipse(320,120,60,60); */
}

// Updates `happiness` from the face input (drops fast when no face is
// seen, rises for funny faces, otherwise settles toward a plateau of 40),
// drifts the baby gently with Perlin noise, clamps everything, renders.
void draw() {
  background(#ccffff);
  happiness += 0.5;
  if (!watched()){
    // no face found -> baby gets scared
    happiness-= 2;
  } else if (funnyFace()){
    happiness++;
  } else if (happiness > 40){
    // relax back down toward the neutral-happy plateau
    happiness-=2;
    if (happiness<40) happiness = 40;
  } else {
    // climb up toward the plateau
    happiness++;
    if (happiness>40) happiness = 40;
  }
  if (happiness>90) happiness = 90;
  if (happiness<-70) happiness = -70;
  stroke(0);
  // Idle wobble. NOTE(review): millis()/1000 is integer division, so the
  // noise input only changes once per second — confirm this is intended.
  baby_center_x += 1-(2*noise(millis()/1000));
  baby_center_y += 1-(2*noise((millis()+500)/800));
  if (baby_center_x < 260) baby_center_x = 260;
  if (baby_center_x > 380) baby_center_x = 380;
  if (baby_center_y < 300) baby_center_y = 300;
  if (baby_center_y > 340) baby_center_y = 340;
  display();
  println(eyebrowLeft);
}

// OSC CALLBACK FUNCTIONS
// Bound by name in setup() via oscP5.plug(); each stores the incoming
// FaceOSC value in the matching global (logging left commented out).

public void found(int i) {
  //println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  //println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
 // println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  //println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  //println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  //println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  //println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  //println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  //println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  //println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  //println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  //println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  if (m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

// Returns true when FaceOSC currently sees a face.
// FIX: the original also declared left/right eye heights hard-coded to 10
// and tested them with `< 10 && < 10` — a branch that could never fire
// (dead code), presumably a stub for an eye-openness/blink check using
// eyeLeft/eyeRight. Removed here with behavior unchanged; confirm whether
// real blink detection is still wanted.
boolean watched() {
  return found != 0;
}

// A "funny face" = raised eyebrows or a wide-open mouth.
boolean funnyFace(){
  return eyebrowLeft > 8 || eyebrowRight > 8 || mouthHeight > 3;
}

// True when the tracked mouth height crosses the open threshold.
boolean mouthOpen(){
  return mouthHeight > 2;
}

Here is a gif of my demo:
faceosc_baby_demo

And here is a weird thing FaceOSC did while I was testing!
screen-shot-2016-10-13-at-6-07-21-pm

Written by Comments Off on Jaqaur – FaceOSC Posted in FaceOSC

hizlik- faceosc

wallpaper

infinite

I really enjoyed making this interactive work, as it is both very pleasing for me to look at and a fun experience to be able to manipulate an imagery that has long since been static, in movies and games. Using your head, you can adjust the point of view in this virtual world, looking around the “corner” and moving side-to-side. One thing I would change if I had the time would be to spread the stars out amongst the space and have a true sense of filled space. Right now, at times, it’s evident that they’re all coming from this once central point, almost creating a tunnel. I tried to add depth by making some thin/small lights with more transparency but it’s not as good. I’m sure I’ll work on it some more this weekend.

loop

screen-shot-2016-10-14-at-12-03-05-am screen-shot-2016-10-14-at-12-02-55-am screen-shot-2016-10-14-at-12-02-47-am

 

import oscP5.*;
OscP5 oscP5;

int found;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();
float[] rawArray;
boolean showArray = false;

Coord center;
Coord new_center;
Coord bz_center;
Coord new_bz_center;
float speed = 0.02;
float ease = 0.1;
ArrayList lines = new ArrayList();
color[] colors = new color[4];

// Processing entry point: full-screen canvas, FaceOSC bindings, the teal/
// white star palette, the two eased control points, and 500 bezier lines.
void setup() {
  //size(1280, 720, OPENGL);
  //size(640, 360, OPENGL);
  fullScreen(OPENGL);
  smooth(); 
  background(0);
  //frameRate(30);

  // FaceOSC broadcasts on UDP port 8338 by default.
  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "rawData", "/raw");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  
  // Star colors (alpha 200 so overlaps glow).
  colors[0] = color(169,241,230,200);
  colors[1] = color(80,214,207,200);
  colors[2] = color(11,168,159,200);
  colors[3] = color(255,255,255,200);
  
  // Both control points (and their eased targets) start at screen center.
  center = new Coord(width/2, height/2);
  new_center = new Coord(width/2, height/2);
  bz_center = new Coord(width/2, height/2);
  new_bz_center = new Coord(width/2, height/2);
  for(int i=0; i<500; i++) {
    lines.add(new BezierLine()); 
  }
}

void draw() {  
  // blur effect
  noStroke();
  fill(0,0, 0, 80);
  rect(0, 0, width, height);
  //background(0);
  
  if(found > 0) {
    new_bz_center.x= width-map(poseOrientation.y, -0.25, 0.25, 0, width);
    new_bz_center.y= height-map(poseOrientation.x, -0.25, 0.25, 0, height);
    new_center.x = map(posePosition.x, 100, 600, 0, width);
    new_center.y = height-map(posePosition.y, 100, 400, 0, height);
  }
  else {
    new_bz_center.x=mouseX;
    new_bz_center.y=mouseY;
    bz_center.x=mouseX;
    bz_center.y=mouseY;
    if(mousePressed) {
      new_center.x = mouseX;
      new_center.y = mouseY;
    }
  }
  
  move();
  
  fill(255);
  for(int i=0; i 1) {
       center.x += d_cx * ease;
       center.y += d_cy * ease;
   }
   if (bz_distance > 1) {
       bz_center.x += d_bzx * ease;
       bz_center.y += d_bzy * ease;
   }
}

// One streaking "star": a short segment advancing along a cubic bezier
// from the (jittered) screen center out past the screen edge, curved by
// the global bz_center control point.
class BezierLine {
  private Coord end;       // target point on a circle enclosing the screen
  private float distance;  // bezier parameter t in [0,1]
  private int delay;       // frames to wait before first appearing
  private Coord vary;      // small per-line jitter of the start point
  private color c;
  
  public BezierLine() {
    delay = int(random(100));
    int vary_amount = 5;
    vary = new Coord(random(-1*vary_amount,vary_amount), random(-1*vary_amount,vary_amount));
    resetEndpoint();
  }
  
  // Re-aims the line at a fresh random point on the enclosing circle,
  // resets its travel parameter and picks a new color.
  private void resetEndpoint() {
    // NOTE(review): random(360) is fed to cos/sin, which take radians; the
    // large range still wraps to a roughly uniform angle, but random(TWO_PI)
    // is presumably what was meant — confirm.
    float angle = random(360);
    float radius = sqrt((width*width)+(height*height))/2;
    float x = width/2 + cos(angle)*radius;
    float y = height/2 + sin(angle)*radius;
    end = new Coord(x,y);
    // random(-1,speed) clamped to [0,speed]: most lines restart at t=0.
    distance = constrain(random(-1,speed), 0, speed);
    int picker = (int)random(0,colors.length);
    c = colors[picker];
    println(picker); // NOTE(review): debug output on every reset — remove?
  }
  
  public void draw() {
    // Stagger the line's first appearance.
    if(delay > 0) {
      delay --;
      return;
    }
    // Control points: doubled start (sharp launch), bz_center bend, end.
    float x1 = center.x + vary.x;
    float y1 = center.y + vary.y;
    float x2 = center.x + vary.x;
    float y2 = center.y + vary.y;
    float x3 = bz_center.x;
    float y3 = bz_center.y;
    float x4 = end.x;
    float y4 = end.y;
    
    noFill();
    stroke(255);
    //bezier(center.x, center.y, center.x, center.y, bz_center.x, bz_center.y, end.x, end.y);
    // Segment grows longer and thicker as it travels outward.
    float len = map(distance, 0, 1, 0, random(.05));
    float thickness = map(distance, 0, 1, .5, random(1.5, 4));
    float tx1 = bezierPoint(x1, x2, x3, x4, distance);
    float ty1 = bezierPoint(y1, y2, y3, y4, distance);
    float tx2 = bezierPoint(x1, x2, x3, x4, constrain(distance+len, 0, 1));
    float ty2 = bezierPoint(y1, y2, y3, y4, constrain(distance+len, 0, 1));
    // Colored halo under a brighter white core.
    stroke(c);
    strokeWeight(thickness*2);
    line(tx1, ty1, tx2, ty2);
    stroke(255,255,255,128);
    strokeWeight(thickness);
    line(tx1, ty1, tx2, ty2);
    distance+=speed;
    if(distance > 1) {
      resetEndpoint();
    }
  }
}

// Minimal mutable 2D point used for streak origins and endpoints.
class Coord {
  public float x;
  public float y;
  
  public Coord(float xPos, float yPos) {
    x = xPos;
    y = yPos;
  }
}

// OSC CALLBACK FUNCTIONS

// OSC "/found" callback: stores how many faces FaceOSC is tracking (0 = lost).
public void found(int i) {
  //println("found: " + i);
  found = i;
}

// OSC "/raw" callback: stashes the raw landmark coordinate array.
public void rawData(float[] raw) {
  rawArray = raw; // stash data in array
}

// OSC "/pose/position" callback: records the head position (z fixed at 0).
public void posePosition(float x, float y) {
  //println("pose position\tX: " + x + " Y: " + y );
  //println(center.x + ", " + center.y);
  posePosition.set(x, y, 0);
}

// OSC "/pose/orientation" callback: records head rotation about all three axes.
public void poseOrientation(float x, float y, float z) {
  //println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}
Written by Comments Off on hizlik- faceosc Posted in FaceOSC

kander – FaceOSC

For my FaceOSC project, I made a program that allows the user to make a composition using horizontal lines that are located at each eye. The tilt of the head controls color, the height of your mouth controls line length, and the distance from eyes to eyebrows controls stroke weight. I liked the idea of being able to generate art using your face, as opposed to just controlling an object. I wish I could have expanded the concept beyond lines — my original idea (second picture) was to generate particles so you could “paint” with your eyes.

My first idea was to control a Michael Jackson face, and clicking through would modify the face through the different stages of his appearance (lol). Then I thought about making a game where you use your mouth to catch objects, before I decided I wanted to make a composition using my face.

img_1668 img_1667

img_1669

GIF of me making a composition using FaceOSC GIF of me making a composition using FaceOSC
import oscP5.*;
OscP5 oscP5;

// num faces found (0 when FaceOSC loses the face)
int found;

// pose: overall scale, position and orientation of the tracked head
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture values streamed from FaceOSC
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

//drawing variables
int arePainting = 0; // NOTE(review): never referenced in the visible code
PImage bg;           // snapshot of the drawing so far (refreshed in mousePressed)
float weightMapped;  // last stroke weight computed by setStrokeWeight()

// Sketch setup: create the canvas, seed the background snapshot, and wire
// OSC callbacks to FaceOSC on its default port 8338.
void setup() {
  size(800, 500);
  frameRate(30);
  background(100);
  // Save-then-reload round trip seeds `bg` with the blank canvas; draw()
  // repaints it each frame so accumulated strokes persist between snapshots.
  save("drawing_so_far.jpg");
  bg = loadImage("drawing_so_far.jpg");
  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  // NOTE(review): the plugs below target methods named "...Received" that
  // are not defined anywhere visible in this sketch; if they are truly
  // missing, oscP5 ignores those messages and the gesture fields
  // (mouthHeight, eyeLeft, ...) keep their 0 defaults — confirm.
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

// Maps the eyebrow-to-eye gap (averaged over both eyes) onto a pen width.
// Side effects: updates the global weightMapped and calls strokeWeight().
void setStrokeWeight(float brow1, float brow2, float eye1, float eye2) {
  // Raw vertical gap between each eye and its brow.
  float gapLeft = eye1 - brow1;
  float gapRight = eye2 - brow2;
  // Rescale each gap from roughly [0, 25] down to [0, 5], then average
  // their magnitudes so left/right contribute equally.
  float avgWeight = (abs(map(gapLeft, 0, 25, 0, 5)) + abs(map(gapRight, 0, 25, 0, 5))) / 2;
  weightMapped = map(avgWeight, 0.7, 2, 0, 6);
  // Square the mapped value so raised brows thicken the line dramatically.
  strokeWeight(pow(abs(weightMapped), 2));
}


// Draws one segment between the two eye heights, centered on the current
// origin (draw() translates to the face position first). Its half-length
// is driven by how far the mouth is open (mouthHeight roughly 0..6).
void drawLine(float y1, float y2) {
  float half = map(mouthHeight, 0, 6, 0, width/3);
  line(-half, y1, half, y2);
} 

// Click to "commit" the drawing: snapshot the current canvas to disk and
// reload it as the new background, so everything drawn so far persists.
void mousePressed() {
  save("drawing_so_far.jpg");
  bg = loadImage("drawing_so_far.jpg");
}

// Each frame while a face is tracked: restore the committed canvas, then
// stamp one line whose position follows the face, color follows head tilt,
// weight follows the brow-to-eye gap, and length follows mouth height.
void draw() {  
  if (found > 0) {
    // Move the origin to the tracked face so drawLine() draws relative to it.
    translate(posePosition.x, posePosition.y);
    // Head roll (z orientation) selects the blue channel of the stroke.
    float bValueMapped = map(poseOrientation.z, -1.5, .5, 0, 255);
    stroke(255, 200, bValueMapped);
    setStrokeWeight(eyebrowLeft, eyebrowRight, eyeLeft, eyeRight); 
    // Repaint the snapshot saved by mousePressed(); background() is not
    // affected by the translate above.
    background(bg);
    drawLine(eyeLeft, eyeRight);
  }
}
Written by Comments Off on kander – FaceOSC Posted in FaceOSC

ngdon-FaceOSC

imageedit_1_4727480009
afejalwfejalewfjwalj

 

I observed an interesting fact that many people (including me) brandish their heads while playing games. For example, when they want to go left, they tilt their heads toward that direction in addition to pressing the left key on the keyboard. They also exhibit different facial expressions when they’re doing different actions in the game.

Therefore I thought that in order to know what the player wants to do in the game, we only need to look at his/her face, and the mouse and keypress input are in fact redundant.

I decided to implement this idea in a first person shooter game. In this world, everyone has no body, but only a face mounted on a machine controlled by it.

I used lots of rotates and translates in P3D mode to render the 3D effect. The map is generated using Prim's algorithm. The enemies are controlled by a simple AI: they wander around if the player is not near, and slowly try to turn and move toward the player and attack him if he comes into a certain range.

The gameplay is really an interesting experience. It is so intuitive that I almost feel effortless controlling my character. When using keyboard/mouse to play a game, no matter how proficient I am with the binding, I always have to go through the process of: “Enemy’s shooting at me->I need to dodge left->tell finger to press left key->finger press left key->character dodge left”. But controlling with face is very different:”Enemy’s shooting at me->my head automatically tilt left->character dodge left”. So happy.

I plan on making it into a multiplayer game, so that people can compete with each other over the internet and see who’s got the best facial muscles.

snip20161014_3

 

 

import oscP5.*;
OscP5 oscP5;
int found;                           // number of faces FaceOSC currently sees
float[] rawarr;                      // raw landmark coordinates (x,y interleaved)
float mouthWidth;
float mouthHeight;                   // open mouth fires bullets (see draw)
float poseScale;                     // head-size proxy; drives forward/back motion
PVector orientation = new PVector(); // head rotation; y component steers turning
PVector posePos = new PVector();     // head position in camera frame; x strafes
// OSC "/found" callback: records the number of tracked faces.
public void found(int i) {
    found = i;
}
// OSC "/raw" callback: stashes the raw landmark array for face rendering.
public void rawData(float[] raw) {
    rawarr = raw;
}
// OSC "/gesture/mouth/width" callback.
public void mouthWidth(float i) {
    mouthWidth = i;
}
// OSC "/gesture/mouth/height" callback: opening the mouth fires (see draw).
public void mouthHeight(float i) {
    mouthHeight = i;
}
// OSC "/pose/orientation" callback: head rotation about all three axes.
public void orientation(float x, float y, float z) {
    orientation.set(x, y, z);
}
// OSC "/pose/scale" callback: larger = face closer to the camera.
public void poseScale(float x) {
    poseScale = x;
}
// OSC "/pose/position" callback: head position in the camera frame.
public void posePos(float x, float y) {
    posePos.set(x, y);
}

// NOTE(review): `mat` appears unused except in commented-out code (see setup).
int[][] mat = new int[64][64];
// World size of one maze cell, in scene units.
int dw = 20;

// Returns s repeated n times ("" when n <= 0); used to build the ASCII
// health bar. StringBuilder avoids the O(n^2) cost of repeated string
// concatenation in a loop.
String multStr(String s, int n) {
    StringBuilder ms = new StringBuilder();
    for (int i = 0; i < n; i++) {
        ms.append(s);
    }
    return ms.toString();
}


// Randomized-Prim maze generator on a w x h grid. After gen(), `walls`
// holds all surviving walls as {col, row, type}; type 1 walls span the x
// direction, type 2 span the y direction (see the renderer in draw()).
// Removed walls are marked {-1, -1, -1} rather than deleted.
public class Prim {
    int[][] walls;    // all candidate walls; entries are {j, i, type} or {-1,-1,-1}
    int[][] visited;  // visited[row][col] == 1 once a cell joins the maze
    int[][] wallist;  // frontier wall list for Prim's algorithm
    int wlen = 0;     // number of live entries in wallist
    int w;
    int h;
    public Prim(int w, int h) {
        this.w = w;
        this.h = h;
        walls = new int[w * h * 2][3];
        visited = new int[h][w];
        // NOTE(review): the +1 spare slot lets delwall2's shift read one
        // past wlen safely; addwall has no bounds check — confirm capacity.
        wallist = new int[w * h * 2 + 1][3];
    }
    // Pushes the four walls surrounding cell (j, i) onto the frontier.
    public void addcellwalls(int j, int i) {
        addwall(j, i, 1);
        addwall(j, i, 2);
        addwall(j, (i + 1), 1);
        addwall((j + 1), i, 2);
    }
    public void addwall(int j, int i, int t) {
        wallist[wlen] = new int[] {
            j, i, t
        };
        wlen++;
    }

    // Marks a wall as removed in the final wall set (tombstone {-1,-1,-1}).
    public void delwall1(int j, int i, int t) {
        for (int k = 0; k < walls.length; k++) {
            if (walls[k][0] == j && walls[k][1] == i && walls[k][2] == t) {
                walls[k] = new int[] {
                    -1, -1, -1
                };
            }
        }
    }
    // Removes a wall from the frontier list by shifting later entries left.
    public void delwall2(int j, int i, int t) {
        for (int k = 0; k < wlen; k++) {
            if (wallist[k][0] == j && wallist[k][1] == i && wallist[k][2] == t) {
                for (int l = k; l < wlen; l++) {
                    wallist[l] = wallist[l + 1];
                }
                wlen -= 1;
            }
        }
    }

    // Returns the two cells separated by wall (j, i, t): for a type-1 wall
    // the cell above/below, for type-2 the cell left/right.
    public int[][] getadjcells(int j, int i, int t) {
        if (t == 1) {
            return new int[][] {
                {
                    j, i
                }, {
                    j, i - 1
                }
            };
        } else {
            return new int[][] {
                {
                    j, i
                }, {
                    j - 1, i
                }
            };
        }

    }
    // Cells outside the grid count as visited so border walls are never opened.
    public boolean isvisited(int j, int i) {
        if (i < 0 || j < 0 || i >= h || j >= w) {
            return true;
        }
        return visited[i][j] == 1;
    }

    // Runs randomized Prim: start from a random cell, repeatedly pick a
    // random frontier wall, and knock it down iff exactly one of its two
    // cells is already in the maze. The count cap is a safety valve.
    public void gen() {
        for (int i = 0; i < h; i++) {
            for (int j = 0; j < w; j++) {
                walls[(i * w + j) * 2] = new int[] {
                    j, i, 1
                };
                walls[(i * w + j) * 2 + 1] = new int[] {
                    j, i, 2
                };
                visited[i][j] = 0;
            }
        }
        int[] o = new int[] {
            floor(random(w)), floor(random(h))
        };
        addcellwalls(o[0], o[1]);
        visited[o[1]][o[0]] = 1;
        int count = 0;
        while (wlen > 0 && count < 1000000) {
            count++;
            int i = floor(random(wlen));
            int[][] adjs = getadjcells(wallist[i][0], wallist[i][1], wallist[i][2]);

            if (isvisited(adjs[0][0], adjs[0][1]) != isvisited(adjs[1][0], adjs[1][1])) {
                // Exactly one side is in the maze: open this wall and absorb
                // the unvisited cell (uv indexes the unvisited side).
                int uv = isvisited(adjs[0][0], adjs[0][1]) ? 1 : 0;
                visited[adjs[uv][1]][adjs[uv][0]] = 1;
                addcellwalls(adjs[uv][0], adjs[uv][1]);
                delwall1(wallist[i][0], wallist[i][1], wallist[i][2]);
                delwall2(wallist[i][0], wallist[i][1], wallist[i][2]);
            } else {
                // Both sides already connected: just drop it from the frontier.
                delwall2(wallist[i][0], wallist[i][1], wallist[i][2]);
            }

        }

    }

}


// A moving particle: player shots, enemy shots, and explosion debris.
// Fields are read and written directly by the main sketch and by Enemy.
public class Bullet {
    float x;
    float y;
    float z;
    PVector forw;    // direction of travel
    float spd = 1;   // distance covered per frame
    float size = 1;  // render radius scale
    float g = 0;     // accumulated gravity pull on y
    float dec = 0.5; // per-frame shrink factor for debris (typ 2)
    int typ = 1;     // 1 = live shot, 2 = shrinking debris, 0 = dead
    int mast = 1;    // owner: 1 = player, 2 = enemy
    public Bullet(float x, float y, float z, PVector forw) {
        this.forw = forw;
        this.x = x;
        this.y = y;
        this.z = z;
    }
    public void update() {
        // Advance along the travel direction; gravity only affects y.
        x += spd * forw.x;
        z += spd * forw.z;
        y += spd * forw.y + g;
        g += 0.01;
        // Debris shrinks every frame and dies once it is invisible.
        if (typ == 2) {
            size *= dec;
            if (size <= 0.001) {
                typ = 0;
            }
        }

    }
}

// Draws one jittery "electric arc" polyline onto the off-screen buffer,
// wiggling over time via Perlin noise. `r` seeds the noise so multiple
// arcs differ from each other.
void drawelec(float r) {
    pg.beginShape();
    for (int seg = 0; seg < 22; seg++) {
        pg.vertex(seg * 0.1, noise(r, 0.1 * seg, 0.5 * frameCount));
    }
    pg.endShape();
}

// One enemy: a floating face on a stilted box. State machine:
//   0 = dead (tips over), 1 = wander forward, 2 = turn in place,
//   3 = attack (close to player: home in and shoot).
// Reads/writes sketch globals: px, py, dw, bullets, bl, score, pg.
public class Enemy {
    float x;
    float y;
    PVector forw = new PVector(0, 0, 1); // current facing/movement direction
    PVector fdir = new PVector(0, 0, 1); // smoothed aim/face direction
    float spd = 0.2;
    int state = 1;
    int hp = 12;
    float fall = 0;                      // tip-over angle accumulated after death
    String[] names = new String[] {
        "James", "John", "Robert", "Michael", "Mary",
        "William", "David", "Richard", "Charles", "Joseph", "Thomas", "Patricia",
        "Christopher", "Linda", "Barbara", "Daniel", "Paul", "Mark", "Elizabeth", "Donald"
    };
    String name = names[floor(random(names.length))];

    // Half-profile outline of the face; mirrored at draw time for the
    // other half. Points are {x, y} in face-local units, scaled by 0.15.
    float[][] mockface = new float[][] {
        {
            0, 0
        }, {
            2, 1
        }, {
            5, 3
        }, {
            6, 10
        }, {
            4, 12.5
        }, {
            2, 12.5
        }, {
            0, 12
        }

    };
    int[][] mockmouth = new int[][] {
        {
            0, 1
        }, {
            1, 1
        }, {
            3, 4
        }, {
            1, 7
        }, {
            0, 7
        }

    };

    float[][] mockeye = new float[][] {
        {
            0.5, 10
        }, {
            2.5, 9
        }, {
            4.5, 10
        }, {
            2.5, 11
        }, {
            0.5, 10
        }

    };


    public Enemy(float x, float y) {
        this.x = x;
        this.y = y;


    }
    // Per-frame AI: move, bounce off maze walls, approach and shoot the
    // player when near, and take hits from player bullets.
    public void nav(Prim p) {
        if (state == 1 || state == 2) {
            fdir.lerp(forw, 0.1);
        }
        // Player within two cells: switch to attack.
        if (dist(x, y, px, py) < dw * 2 && state != 0) {
            state = 3;

        }


        if (state == 1) {
            // Wander forward; note world y maps to -forw.z.
            x += forw.x * spd;
            y -= forw.z * spd;
            fdir.lerp(forw, 0.1);
            // Wall collision: back off two steps and start turning.
            for (int k = 0; k < p.walls.length; k++) {

                if (p.walls[k][2] != -1) {
                    float wallx = p.walls[k][0] * dw;
                    float wally = p.walls[k][1] * dw;

                    if ((p.walls[k][2] == 1 && x >= wallx && x <= wallx + dw && y >= wally - 3 && y <= wally + 3) || (p.walls[k][2] == 2 && y >= wally && y <= wally + dw && x >= wallx - 3 && x <= wallx + 3)) {
                        x -= forw.x * spd * 2;
                        y += forw.z * spd * 2;
                        state = 2;

                    }
                }
            }
            // Occasionally turn for no reason, to look alive.
            if (random(1.0) < 0.005) {
                state = 2;
            }

        } else if (state == 2) {
            // Rotate facing in the xz plane until a random roll resumes walking.
            PVector v = new PVector(forw.x, forw.z);
            v.rotate(0.1);
            forw.x = v.x;
            forw.z = v.y;
            if (random(1.0) < 0.1) {
                state = 1;
            }
        } else if (state == 3) {
            // Attack: creep forward at half speed, still colliding with walls.
            x += forw.x * spd * 0.5;
            y -= forw.z * spd * 0.5;
            for (int k = 0; k < p.walls.length; k++) {

                if (p.walls[k][2] != -1) {
                    float wallx = p.walls[k][0] * dw;
                    float wally = p.walls[k][1] * dw;

                    if ((p.walls[k][2] == 1 && x >= wallx && x <= wallx + dw && y >= wally - 3 && y <= wally + 3) || (p.walls[k][2] == 2 && y >= wally && y <= wally + dw && x >= wallx - 3 && x <= wallx + 3)) {
                        x -= forw.x * spd * 2;
                        y += forw.z * spd * 2;

                    }
                }
            }
            // Slowly steer the aim direction toward the player.
            PVector v = new PVector(-px + x, py - y);
            v.rotate(PI);
            fdir.lerp(new PVector(v.x, 0, v.y), 0.005);
            fdir.limit(1);
            forw.lerp(fdir, 0.1);

            // Fire in bursts gated by noise (not pure random, so shots cluster).
            PVector v2 = new PVector(-fdir.x, fdir.z);
            v2.rotate(PI);
            if (noise(0.5 * frameCount) > 0.65) {
                bullets[bl] = new Bullet(x, -1.5, y, new PVector(v2.x, 0, v2.y));
                bullets[bl].size = 0.6;
                bullets[bl].spd = 0.9;
                bullets[bl].mast = 2;
                bl++;
            }

            // Player escaped: go back to turning/wandering.
            if (dist(x, y, px, py) > dw * 2) {
                state = 2;
            }
        }
        // Player bullets within 2 units hit us: spawn debris, lose hp,
        // and burst + award score on death.
        for (int i = 0; i < bl; i++) {
            if (bullets[i].mast == 1 && bullets[i].typ == 1 && state > 0) {

                if (dist(bullets[i].x, bullets[i].z, x, y) < 2) {

                    bullets[i].typ = 0;
                    hp -= 1;
                    for (int j = 0; j < 3; j++) {
                        bullets[bl] = new Bullet(bullets[i].x, bullets[i].y, bullets[i].z - 0.01, PVector.random3D());
                        bullets[bl].size = 0.8;
                        bullets[bl].spd = 0.4;
                        bullets[bl].typ = 2;
                        bullets[bl].dec = 0.8;
                        bl++;
                    }
                    if (hp <= 0) {
                        score += 100;
                        for (int j = 0; j < 10; j++) {
                            bullets[bl] = new Bullet(x, -3, y, PVector.random3D());
                            bullets[bl].size = 3;
                            bullets[bl].spd = 0.4;
                            bullets[bl].typ = 2;
                            bullets[bl].dec = 0.8;
                            bl++;
                        }
                    }
                }
            }
        }
        if (hp <= 0) {
            this.state = 0;
        }
    }


    // Renders the enemy into the off-screen buffer `pg`: stilted body box,
    // floating name tag when near, mirrored face/mouth/eye outlines whose
    // mouth animates with noise, plus electric arcs on the legs.
    public void drawenem() {
        pg.pushMatrix();
        pg.translate(x, 0, y);
        pg.rotateY(-PI / 2 + atan2(forw.z, forw.x));
        if (this.state == 0) {
            // Dead: pivot around the base and tip over up to 90 degrees.
            pg.translate(0, 7, 0);
            //rotateY(random(PI*2));
            pg.rotateX(-fall);
            if (fall < PI / 2) {
                fall += 0.1;
            }
            pg.translate(0, -7, 0);
            pg.stroke(100);
            pg.strokeWeight(2);
            pg.fill(100);
            pg.pushMatrix();
            pg.translate(0, 7, 0);
            pg.box(2.5, 2, 2.5);
            pg.translate(0, -3, 0);
            //pg.box(1.5,6,0.4);
            pg.translate(-1.1, -2, 0);
            pg.box(0.1, 9, 0.1);
            pg.translate(2.2, 0, 0);
            pg.box(0.1, 9, 0.1);
            pg.popMatrix();
            pg.fill(255);


        } else {
            // Alive: base box plus two thin legs.
            pg.stroke(100);
            pg.strokeWeight(2);
            pg.fill(100);
            pg.pushMatrix();
            pg.translate(0, 7, 0);
            pg.box(2.5, 2, 2.5);
            pg.translate(0, -3, 0);
            //pg.box(1.5,6,0.4);
            pg.translate(-1.1, -2, 0);
            pg.box(0.1, 9, 0.1);
            pg.translate(2.2, 0, 0);
            pg.box(0.1, 9, 0.1);
            pg.popMatrix();
            pg.fill(255);

            pg.pushMatrix();
            pg.translate(0, -1.2, -0.5);
            //scale(0.2);
            pg.fill(100);
            pg.textSize(12);
            // Show the name tag only within three cells of the player.
            if (dist(x, y, px, py) < dw * 3) {
                pg.pushMatrix();
                pg.translate(0, -3, 0);
                pg.scale(0.05);
                pg.textAlign(CENTER);
                pg.textMode(SHAPE);
                pg.rotateY(PI);
                pg.text(name, 0, 0);
                pg.popMatrix();
            }
            pg.fill(255);
            // Undo the body rotation, then face the smoothed aim direction.
            pg.rotateY(PI / 2 - atan2(forw.z, forw.x));
            pg.rotateY(-PI / 2 + atan2(fdir.z, fdir.x));
            pg.rotateY(-PI / 8);
            pg.beginShape();

            for (int i = 0; i < mockface.length; i++) {

                pg.vertex(mockface[i][0] * 0.15, -mockface[i][1] * 0.15);
            }
            pg.endShape();
            pg.beginShape();
            for (int i = 0; i < mockmouth.length; i++) {

                // Mouth opening animates with noise over time.
                pg.vertex(mockmouth[i][0] * 0.15, 0.15 * (-4 + (mockmouth[i][1] - 4) * noise(0.5 * frameCount)));
            }
            pg.endShape();
            pg.beginShape();
            for (int i = 0; i < mockeye.length; i++) {

                pg.vertex(mockeye[i][0] * 0.15, -mockeye[i][1] * 0.15);
            }
            pg.endShape();
            // Mirrored second half of the face, angled away from the first.
            pg.rotateY(PI / 4);
            pg.beginShape();

            for (int i = mockface.length - 1; i >= 0; i--) {
                pg.vertex(-mockface[i][0] * 0.15, -mockface[i][1] * 0.15);
            }
            pg.endShape();
            pg.beginShape();
            for (int i = mockmouth.length - 1; i >= 0; i--) {
                pg.vertex(-mockmouth[i][0] * 0.15, 0.15 * (-4 + (mockmouth[i][1] - 4) * noise(0.5 * frameCount)));
            }
            pg.endShape();
            pg.beginShape();
            for (int i = mockeye.length - 1; i >= 0; i--) {
                pg.vertex(-mockeye[i][0] * 0.15, -mockeye[i][1] * 0.15);
            }
            pg.endShape();
            pg.popMatrix();

            // Static arcs on the left leg...
            pg.pushMatrix();
            pg.translate(-1.1, -2.2, 0);
            for (int i = 0; i < 2; i++) {
                drawelec(i);
            }
            pg.popMatrix();

            // ...plus arcs that bob up and down the legs over time.
            for (int i = 0; i < 3; i++) {
                pg.pushMatrix();
                pg.translate(-1.1, -2 + 8 * noise(i * 10, 0.1 * frameCount), 0);
                drawelec(i);
                pg.popMatrix();
            }

        }
        pg.popMatrix();
    }


}



// 16x16 maze generated in setup().
Prim p = new Prim(16, 16);

// Player position, started at the middle of the map.
float px = dw * 8.5;
float py = dw * 8.5;
PVector forward;          // player facing direction (xy plane; y maps to world z)
PVector left;             // forward rotated 90 degrees, for strafing
PVector thwart;           // movement damping vector (currently always (1,1,1))
PVector movement;         // last movement step, undone on wall collision
Bullet[] bullets = new Bullet[1024];
int bl = 0;               // number of live bullets
float[] farr = new float[256]; // raw face landmarks remapped to screen space
Enemy[] enemies = new Enemy[256];
int el = 0;               // number of enemies
PGraphics pg;             // off-screen 3D buffer the world renders into
float health = 100;
int score = 0;
PFont tfont;              // score / game-over font
PFont dfont;              // health-bar font
// Sketch setup: fonts, 3D canvas and off-screen buffer, OSC wiring to
// FaceOSC on port 8338, maze generation and enemy placement.
void setup() {
    health = 100;
    score = 0;
    tfont = createFont("OCR A Std", 18);
    dfont = createFont("Lucida Sans", 12);
    size(720, 576, P3D);
    pg = createGraphics(720, 576, P3D);
    frameRate(30);
    oscP5 = new OscP5(this, 8338);
    oscP5.plug(this, "found", "/found");
    oscP5.plug(this, "rawData", "/raw");
    oscP5.plug(this, "orientation", "/pose/orientation");
    oscP5.plug(this, "mouthWidth", "/gesture/mouth/width");
    oscP5.plug(this, "mouthHeight", "/gesture/mouth/height");
    oscP5.plug(this, "poseScale", "/pose/scale");
    oscP5.plug(this, "posePos", "/pose/position");
    p.gen();
    //mat = makeMaze(mat[0].length,mat.length);
    forward = new PVector(0, 1, 0);
    left = new PVector(0, 1, 0);
    // Scatter 100 enemies at cell centers, re-rolling any spawn within
    // 50 units of the player start.
    // NOTE(review): spawns use random(20) cells but the maze is 16x16, so
    // some enemies can start outside the maze — confirm this is intended.
    for (int i = 0; i < 100; i++) {

        enemies[el] = new Enemy((floor(random(20)) + 0.5) * dw, (floor(random(20) + 0) + 0.5) * dw);
        while (dist(enemies[el].x, enemies[el].y, px, py) < 50) {

            enemies[el] = new Enemy((floor(random(20)) + 0.5) * dw, (floor(random(20) + 0) + 0.5) * dw);
        }


        el++;
    }

}

// Main frame loop. While a face is tracked and the player lives:
// render the 3D world (walls, bullets, enemies) into `pg` from the
// player's viewpoint, overlay the player's own face drawn from raw
// landmarks, then translate face gestures into game controls:
// open mouth = shoot, head scale = forward/back, head yaw = turn,
// head x position = strafe. Finally the HUD (health bar, score).
void draw() {
    if (found != 0 && health > 0) {
        // Recompute the strafe axis from the current facing.
        left.x = forward.x;
        left.y = forward.y;
        left.z = forward.z;
        left.rotate(PI / 2);
        thwart = new PVector(1, 1, 1);

        pg.beginDraw();
        pg.pushMatrix();
        pg.background(240);
        //beginpg.camera();

        // First-person camera at the player; hold 'm' for an overhead view.
        pg.camera(px, 0, py, px + forward.x, 0, py + forward.y, 0, 1, 0);
        if (keyPressed) {
            if (key == 'm') {
                pg.camera(px, -100, py, px + forward.x, 0, py + forward.y, 0, 1, 0);
            }
        }
        pg.frustum(-0.1, 0.1, -0.1, 0.1, 0.1, 200);
        pg.scale(1, 0.72 / 0.576, 1);
        pg.pushMatrix();
        pg.fill(255, 0, 0);
        pg.translate(px, 0, py);
        //pg.sphere(2);
        pg.popMatrix();

        pg.pushMatrix();
        pg.fill(255, 0, 0);
        pg.translate(px + forward.x * 3, 0, py + forward.y * 3);
        //pg.sphere(1);
        pg.popMatrix();

        pg.stroke(100);
        pg.strokeWeight(2);
        //pg.noStroke();
        //pg.translate(100,100);
        //pg.translate(-px,0,-py);
        //rotateY(frameCount*0.1);
        // Draw every surviving maze wall; walls fade toward white with
        // distance. If the player overlaps a wall, undo the last movement.
        // NOTE(review): `movement` is null until the player first moves —
        // colliding before that would NPE; confirm it cannot happen.
        for (int i = 0; i < p.walls.length; i++) {
            pg.pushMatrix();
            if (p.walls[i][2] != -1) {
                float wallx = p.walls[i][0] * dw;
                float wally = p.walls[i][1] * dw;
                pg.translate(wallx, 0, wally);
                pg.fill(constrain(map(dist(wallx, wally, px, py), 0, 100, 255, 240), 240, 255));
                if (p.walls[i][2] == 1) {


                    if (px >= wallx && px <= wallx + dw && py >= wally - 2 && py <= wally + 2) {
                        //thwart.x = 0;
                        //thwart.y = 0;
                        px -= movement.x;
                        py -= movement.y;
                        //pg.fill(255,0,0);
                    }
                    pg.translate(dw / 2, 0, 0);
                    pg.box(dw, 16, 2);

                } else {

                    if (py >= wally && py <= wally + dw && px >= wallx - 2 && px <= wallx + 2) {
                        //thwart.x = 0;
                        //thwart.y = 0;
                        px -= movement.x;
                        py -= movement.y;
                        //pg.fill(255,0,255);
                    }
                    pg.translate(0, 0, dw / 2);
                    pg.box(2, 16, dw);
                }
            }
            pg.popMatrix();
        }
        // Update and draw bullets; spawn debris bursts when a live shot
        // hits the floor or a wall, damage the player on enemy-shot hits,
        // and compact dead bullets out of the array.
        // NOTE(review): removal shifts while iterating without decrementing
        // i, so the bullet after a removed one is skipped for a frame.
        for (int i = 0; i < bl; i++) {
            pg.pushMatrix();
            pg.translate(bullets[i].x, bullets[i].y, bullets[i].z);
            pg.fill(100);
            pg.noStroke();
            pg.sphere(bullets[i].size / 2 + bullets[i].size / 2 * noise(0.5 * i, 0.1 * frameCount));
            bullets[i].update();
            pg.popMatrix();
            if (bullets[i].typ == 1) {
                if (bullets[i].y > 8) {


                    for (int j = 0; j < 3; j++) {
                        bullets[bl] = new Bullet(bullets[i].x, bullets[i].y, bullets[i].z - 0.01, PVector.random3D());
                        bullets[bl].size = 0.8;
                        bullets[bl].spd = 0.4;
                        bullets[bl].typ = 2;
                        bullets[bl].dec = 0.8;
                        bl++;
                    }
                    bullets[i].typ = 0;
                }
                for (int k = 0; k < p.walls.length; k++) {

                    if (p.walls[k][2] != -1) {
                        float wallx = p.walls[k][0] * dw;
                        float wally = p.walls[k][1] * dw;
                        float bx = bullets[i].x;
                        float by = bullets[i].z;
                        if ((p.walls[k][2] == 1 && bx >= wallx && bx <= wallx + dw && by >= wally - 1 && by <= wally + 1) || (p.walls[k][2] == 2 && by >= wally && by <= wally + dw && bx >= wallx - 1 && bx <= wallx + 1)) {
                            for (int j = 0; j < 3; j++) {
                                bullets[bl] = new Bullet(bullets[i].x, bullets[i].y, bullets[i].z - 0.01, PVector.random3D());
                                bullets[bl].size = 0.8;
                                bullets[bl].spd = 0.4;
                                bullets[bl].typ = 2;
                                bullets[bl].dec = 0.8;
                                bl++;
                            }
                            bullets[i].typ = 0;


                        }
                    }
                }
                if (bullets[i].mast == 2 && dist(bullets[i].x, bullets[i].z, px, py) < 2) {
                    health -= 2;
                    bullets[i].typ = 0;
                }


            }


            if (bullets[i].typ == 0) {
                for (int j = i; j < bl; j++) {
                    bullets[j] = bullets[j + 1];
                }
                bl--;
            }
        }
        for (int i = 0; i < el; i++) {
            enemies[i].drawenem();
            enemies[i].nav(p);
        }

        pg.popMatrix();
        pg.endDraw();
        image(pg, 0, 0);
        noFill();

        // Remap raw FaceOSC landmarks (camera space, mirrored 640x480)
        // onto the 720x576 screen.
        for (int i = 0; i < rawarr.length; i++) {
            farr[i] = rawarr[i];
            if (i % 2 == 0) {
                farr[i] = (640 - farr[i]) * 720 / 640;
            }
            if (i % 2 == 1) {
                farr[i] = farr[i] * 576 / 480;
            }

        }

        // Cockpit frame lines.
        stroke(150);
        line(width / 2 - 200, height / 2 - 200, width / 2 - 200, height);
        line(width / 2 + 200, height / 2 - 200, width / 2 + 200, height);
        pushMatrix();
        translate(width / 2 - 280, height / 2 - 200);

        beginShape();
        for (int i = 0; i < 57; i++) {
            //vertex(i*10,50*noise(0.1*i,0.5*frameCount));
        }
        endShape();

        popMatrix();
        // Trace the player's own face from landmark index ranges:
        // jaw/brow outline, the two eyes, then nose and mouth.
        beginShape();
        for (int i = 0; i < 34; i += 2) {
            vertex(farr[i], farr[i + 1]);
        }
        for (int i = 52; i > 32; i -= 2) {
            vertex(farr[i], farr[i + 1]);
        }
        endShape(CLOSE);
        beginShape();
        for (int i = 72; i < 84; i += 2) {
            vertex(farr[i], farr[i + 1]);
        }
        endShape(CLOSE);
        beginShape();
        for (int i = 84; i < 96; i += 2) {
            vertex(farr[i], farr[i + 1]);
        }
        endShape(CLOSE);


        beginShape();
        for (int i = 96; i < 110; i += 2) {
            vertex(farr[i], farr[i + 1]);
        }

        for (int i = 124; i > 118; i -= 2) {
            vertex(farr[i], farr[i + 1]);
        }
        endShape(CLOSE);
        beginShape();
        for (int i = 108; i < 118; i += 2) {
            vertex(farr[i], farr[i + 1]);
        }
        vertex(farr[96], farr[97]);
        for (int i = 130; i > 124; i -= 2) {
            vertex(farr[i], farr[i + 1]);
        }
        endShape(CLOSE);

        //println(mouthHeight);
        // Open mouth: fire a shot plus a little muzzle-debris puff.
        if (mouthHeight > 1.8) {
            bullets[bl] = new Bullet(px + forward.x, 0.8, py + forward.y, new PVector(forward.x, 0, forward.y));
            bullets[bl].size = 0.6;
            bl++;

            for (int i = 0; i < 5; i++) {
                bullets[bl] = new Bullet(px + forward.x * 0.2, 0.3, py + forward.y * 0.2, PVector.random3D());
                bullets[bl].size = 0.2;
                bullets[bl].spd = 0.08;
                bullets[bl].typ = 2;
                bl++;
            }
        }
        // Lean in (bigger face) = walk forward; lean back = walk backward.
        if (poseScale > 5.3) {
            px += 0.3 * forward.x * thwart.x;
            py += 0.3 * forward.y * thwart.y;
            movement = forward.copy();
        }
        if (poseScale < 4.7) {
            px -= 0.3 * forward.x * thwart.x;
            // NOTE(review): thwart.x is used for the y step here (elsewhere
            // it's thwart.y) — harmless while thwart is (1,1,1), but confirm.
            py -= 0.3 * forward.y * thwart.x;
            movement = forward.copy();
            movement.rotate(PI);
        }
        // Head yaw turns the view, faster the further you turn.
        float roty = degrees(orientation.y);
        if (roty > 4) {
            forward.rotate(0.04 + 0.01 * (roty - 5));
        }
        if (roty < -4) {
            forward.rotate(-0.04 + 0.01 * (roty + 5));
        }

        // Head x position (camera frame) strafes left/right.
        if (posePos.x > 340) {

            px -= 0.3 * left.x;
            py -= 0.3 * left.y;
            movement = left.copy();
            movement.rotate(PI);
        }
        if (posePos.x < 300) {
            px += 0.3 * left.x;
            py += 0.3 * left.y;
            movement = left.copy();

        }
        println(health);
    }
    // HUD: ASCII health bar at the bottom, score at the top.
    fill(150);
    noStroke();
    //textFont(tfont);
    textAlign(CENTER);
    textSize(16);

    textFont(dfont);
    //text("["+multStr(".",floor(100-health)/4)+multStr("|",floor(health/2))+multStr(".",floor(100-health)/4)+"]",width/2,100);
    text("[" + multStr("|", floor(health / 2)) + "]", width / 2, 550);
    textFont(tfont);
    text(score, width / 2, 50);
    //rect(0,0,health*7.2,4);
    if (health <= 0) {
        textAlign(CENTER);
        textFont(tfont);
        text("GAME OVER", width / 2, height / 2);
    }

}
Written by Comments Off on ngdon-FaceOSC Posted in FaceOSC