sapeck-Body

My goal was to create an environment of balloons where a motion-captured person interacts by popping the balloons. I chose a fight BVH from mocapdata.com in which the figure throws a series of punches at the wall of balloons. I looked through a lot of different fight sequences, and this one seemed to fit best. I think it would have worked better if I had a continuously, endlessly walking and fighting figure moving through a never-ending stream of balloons; otherwise, it's just boring.

I wanted to play with the placement of the balloons and have them move out of the way or drift off, but three.js made things difficult. For example, I found a good way to create a matte finish on the balloons, but that approach prevented me from setting the opacity to hide the popped ones. I also found a good balloon 3D model, but I could not get three.js to display it. If I use three.js in the future, I need to have a much better understanding of it.
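In hindsight, one workaround (a minimal sketch, not what I actually shipped below; the popBalloon helper and popped flag are hypothetical) would be to remove a popped balloon's mesh from the scene entirely instead of fading it with opacity, which would leave the material free to stay matte and non-transparent:

let popBalloon = balloon => {
  if (balloon.popped) return // already popped, nothing to do
  balloon.popped = true // track state on the balloon object instead of checking opacity
  scene.remove(balloon.mesh) // take the mesh out of the scene graph
  balloon.mesh.geometry.dispose() // free the GPU buffers for the sphere
  balloon.mesh.material.dispose()
}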

import colors from '../data/colorsHex'
// THREE itself (plus the BVHLoader and OrbitControls example classes) is assumed to be available globally, e.g. included via script tags or imported elsewhere in the project
 
var clock = new THREE.Clock()
 
var camera, controls, scene, renderer
var mixer, skeletonHelper
 
init()
animate()
 
var loader = new THREE.BVHLoader()
loader.load('bvh/fighting-31-syotei-yokoyama.bvh', result => {
  skeletonHelper = new THREE.SkeletonHelper(result.skeleton.bones[0])
  skeletonHelper.material.linewidth = 10
  skeletonHelper.skeleton = result.skeleton // allow animation mixer to bind to SkeletonHelper directly
 
  var boneContainer = new THREE.Group()
  boneContainer.add(result.skeleton.bones[0])
 
  scene.add(skeletonHelper)
  scene.add(boneContainer)
 
  // play animation
  mixer = new THREE.AnimationMixer(skeletonHelper)
  mixer.clipAction(result.clip).setEffectiveWeight(1.0).play()
})
 
// create an AudioListener and add it to the camera
var listener = new THREE.AudioListener()
camera.add(listener)
 
// create a global audio source
var sound = new THREE.Audio(listener)
 
// load a sound and set it as the Audio object's buffer
var audioLoader = new THREE.AudioLoader()
audioLoader.load('audio/Balloon Popping-SoundBible.com-1247261379.wav', buffer => {
  sound.setBuffer(buffer)
  sound.setLoop(false)
  sound.setVolume(1)
})
 
var ambientLight = new THREE.AmbientLight(0x000000) // black ambient light adds no illumination; the point lights below do the lighting
scene.add(ambientLight)
 
var lights = []
lights[0] = new THREE.PointLight(0xffffff, 1, 0)
lights[1] = new THREE.PointLight(0xffffff, 1, 0)
lights[2] = new THREE.PointLight(0xffffff, 1, 0)
 
lights[0].position.set(0, 2000, 0)
lights[1].position.set(1000, 2000, 0)
lights[2].position.set(-1000, -2000, 0)
 
scene.add(lights[0])
scene.add(lights[1])
scene.add(lights[2])
 
let newBalloon = (r, color, x, y, z, o) => {
  var geometry = new THREE.SphereGeometry(r, 32, 32)
  var material = new THREE.MeshStandardMaterial({
    color: color,
    wireframe: false,
    transparent: true,
    opacity: o
  })
  var sphere = new THREE.Mesh(geometry, material)
  sphere.position.set(x, y, z)
  return sphere
}
 
// build a wall of balloons: i columns wide, (i - 4) rows tall, (i - 2) layers deep, with radius r, spacing s, and opacity o
let newBalloonGrid = (r, i, s, o) => {
  let balloons = []
  let pad = (r * 2) + s
  let c = ((i - 1) * pad) / 2
  for (let x of Array(i).keys()) {
    for (let y of Array(i - 4).keys()) {
      for (let z of Array(i - 2).keys()) {
        let color = colors[Math.floor(Math.random() * colors.length)]
        let bx = x * pad - c + 100
        let by = y * pad + r
        let bz = z * pad - c + 250
        let balloon = newBalloon(r, color, bx, by, bz, o)
        scene.add(balloon)
        balloons.push({
          pos: {
            x: bx,
            y: by,
            z: bz
          },
          r: r,
          o: o,
          color: color,
          mesh: balloon
        })
      }
    }
  }
  return balloons
}
let balloons = newBalloonGrid(20, 10, 5, 1)
 
function init () {
  camera = new THREE.PerspectiveCamera(90, window.innerWidth / window.innerHeight, 1, 1000)
  camera.position.set(0, 450, -400)
 
  controls = new THREE.OrbitControls(camera)
  controls.minDistance = 300
  controls.maxDistance = 700
 
  scene = new THREE.Scene()
 
  scene.add(new THREE.GridHelper(200, 10))
 
  // renderer
  renderer = new THREE.WebGLRenderer({ antialias: true })
  renderer.setClearColor(0xeeeeee)
  renderer.setPixelRatio(window.devicePixelRatio)
  renderer.setSize(window.innerWidth, window.innerHeight)
 
  document.body.appendChild(renderer.domElement)
 
  window.addEventListener('resize', onWindowResize, false)
}
 
function onWindowResize () {
  camera.aspect = window.innerWidth / window.innerHeight
  camera.updateProjectionMatrix()
 
  renderer.setSize(window.innerWidth, window.innerHeight)
}
 
var set = false
function animate () {
  // if (!isPlay) return
  window.requestAnimationFrame(animate)
 
  var delta = clock.getDelta()
 
  if (mixer) mixer.update(delta)
  // if (skeletonHelper) skeletonHelper.update()
 
  renderer.render(scene, camera)
 
  // check every bone against every balloon and pop any balloon a bone passes close to
  if (skeletonHelper) {
    if (!set) {
      console.log(skeletonHelper.skeleton.bones)
      set = true
    }
    if (skeletonHelper.skeleton) {
      for (let bone of skeletonHelper.skeleton.bones) {
        if (bone.name !== 'ENDSITE') {
          for (let balloon of balloons) {
            // console.log(skeletonHelper.skeleton.bones)
            let ballPos = balloon.pos
            let bonePos = bone.position
            let dist = Math.sqrt(Math.pow(ballPos.x - bonePos.x, 2) + Math.pow(ballPos.y - bonePos.y, 2) + Math.pow(ballPos.z - bonePos.z, 2))
            // console.log({ dist, ballPos, bonePos, name: bone.name })
            if (dist <= balloon.r * 4 && balloon.mesh.material.opacity !== 0) {
              // pop the balloon: restart the pop sound and hide the mesh by zeroing its opacity
              if (sound.isPlaying) sound.stop()
              sound.play()
              balloon.mesh.material.opacity = 0
            }
          }
        }
      }
    }
  }
}

sapeck-LookingOutwards03

483 Lines Second Edition (2015) by Mimi Son explores how light and image can create a surreal digital environment. The interactivity lies in how the viewer views the piece. Today, I viewed one of Memo Akten's pieces that explores creating a different environment in each eye in virtual reality. Users explore the space by moving their head in the VR environment and by attempting to focus on different parts. Son's work attempts to create similarly surreal environments in reality through projection. Standing closer to or farther from the lines creates a sense of motion through the plane of lines. Looking at the piece from the angle of a tunnel creates a sense of motion along the plane of lines.

sapeck-Viewing04

Spectacle prioritizes the technical and aesthetic properties of a medium over the conceptual exploration of that medium. Speculation prioritizes the conceptual exploration of a medium over its technical and aesthetic properties.

Universal Everything's Walking City (2014) is mostly spectacle. The piece explores the methods of animating the walking motion and the transitions between the methods. The character's motion is constant and continuous. The character does not exhibit any emotion and is walking toward nothing. The background is blank. The purpose is to show the animation skill, not display a meaning.

The piece leans toward acceleration, as it shows off a new technical boundary of animation. The piece is very visible, as it clearly shows what it is demonstrating. It is surplus, as it is useful for future work but useless from a conceptual standpoint. It was created commercially as a technical demonstration of the studio's ability. It shows only function.

sapeck-telematic

Unfortunately, this demo sometimes has issues when embedded because of its camera integration. If it doesn't work, please run it here: https://face-game.github.io/

The Face Game is an attempt to create awkward, anonymized interactions by pairing two players' facial expressions.

Players move their face into various positions without knowing that their picture is being taken. After a few head-moving tasks, players are shown their face side by side with the faces of other players who have completed the same tasks. The intended effect is to make the two players seem like they may be kissing or licking each other. After the game completes, the player's images are uploaded to a database where (pending approval to filter out NSFW content) they can then be randomly selected to be shown when a new player plays the game. The game's interaction is one-to-many, where the many is infinitely growing. The anonymous yet intimate nature of the game makes players uncomfortable seeing their intimate faces next to a stranger, but comfortable in that they don't know the stranger and the stranger did not witness the interaction.
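As a rough illustration of that selection step (this is a sketch, not the project's actual backend code; the pickPartnerImage helper and the entry fields are hypothetical), choosing a partner image for a new player could amount to filtering the content database for approved entries from the same task and picking one at random:

const pickPartnerImage = (entries, taskId) => {
  // keep only entries that passed moderation and match the current task
  const candidates = entries.filter(e => e.approved && e.task === taskId)
  if (candidates.length === 0) return null // nobody has completed this task yet
  return candidates[Math.floor(Math.random() * candidates.length)]
}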

I think that my project is successful in creating an awkward interaction. When I tested the game on my peers, it took them a moment to figure out how to move the cursor, but they then got the hang of it very quickly. One pitfall is that moving the head to the edge of the screen often moves it out of frame if the player is too close to the camera. Another pitfall is the varying quality of computer webcams. However, the game works fine most of the time. My peers found it odd to see a picture of someone else at the end, but they always laughed. They would then want to play it again and see if they got a different result.

My original idea was to have two face silhouettes side by side. One user on one side and one user on the other. The game would coerce the two players to kiss and then snap a picture. However, this was difficult to implement and hard to understand. I think that the one-to-many approach with the hidden goal is much more successful.

Source code on GitHub: face-game/face-game
Image database on GitHub: face-game/face-game-content
Backend hosted on Glitch: face-game

sapeck-LookingOutwards02

Glenn Marshall combined GIF loops and generative neural styling to cover the loops in flowing, pixelating, continuous plasma. I like the combination of order and disorder that each GIF shows. In one way, the GIFs obviously show a human head, but they are filled with noise. What is so satisfying is that the pixelation and graininess that usually come from such noise create moving, flowing patterns in the pieces. There is no write-up for these pieces other than that they are a neural style transfer. I assume that Marshall started with the flowing GIFs and a reference image with noise or scales and applied a neural style transfer to them. So the GIF loops were transformed to take on the style of the reference image. Marshall's touch is in how he trained the neural network and which GIF loop he chose to work with. Marshall used the computer to build off of his own work. The computer created a derivative, not an entirely new piece.
Neural GIF Loops by Glenn Marshall (2018)

sapeck-Reading03

While I agree with Naimark's claim, I don't think that it should be used as a scale to evaluate work. Between every first and last word is an always-growing, still-significant body of work. If the first word is determined chronologically but the last word is determined by comparison of importance, then there is no definition for what else is notable. The general public may remember the first and last word artists best (e.g. Beethoven, Pollock, Warhol), but that doesn't void the value of other artists.

The intent to develop first or last word art also creates a dilemma. If an artist attempts to create a last word piece, then they will work forever and maybe never finish a final work unless they are the next Beethoven. As Naimark's article notes, Beethoven's work has withstood the test of time. If an artist works to create as many first word pieces as possible, then their body of work will be rushed and never reach beyond a shallow degree of complexity.

sapeck-Clock

With no human present:

When a human is present, the time runs away from the human (either via face detection or mouse click):

The time running away to the other side of the screen:

Embedded demo (responds to both mouse clicks and face detection in the browser):

This clock is scared of people. You can't run away from time if it runs away from you. I imagine this clock on someone's nightstand or in their kitchen, like a pet fish. Just as a fish swims to the opposite side of the bowl when a hand nears, the time runs to the opposite side of the screen when anyone is in view. The clock is both infuriating, as it doesn't tell you the time when you need it, and reflective of how necessary time is. I don't think I am experienced enough to make the time look truly animated and living. I added a flicker and blinks to make it seem more alive, but the motion from an easing function isn't natural enough.

/* Sapeck    9/20/2017
"sapeck-Clock"
60-212                        Carnegie Mellon University
Copyright (C) 2018-present  Sapeck
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License version 3 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*/
 
var DEBUG = false;
 
var prevSec;
var millisRolloverTime;
var mils;
var moveStartTime = 0;
var sideCounter = 0;
var backColor = 255;
var returning = false;
 
var moveTime = [0, 0, 0];
var sideMoveTime = 0;
var currSideToX = 0;
var moving = false;
var sideMoving = false;
 
var ctracker;
 
function setup() {
  createCanvas(800, 800);
  millisRolloverTime = 0;
 
  var videoInput = createCapture();
  videoInput.size(640, 480);
  //videoInput.position(0, 0);
 
  ctracker = new clm.tracker();
  ctracker.init();
  ctracker.start(videoInput.elt);
}
function draw() {
  time = [hour(), minute(), second(), 0];
  updateMils(time[2]);
  time[3] = mils;
  showTime = [nf(time[0], 2), nf(time[1], 2), nf(time[2], 2), nf(time[3], 3)]; // zero-padded hour, minute, second, milliseconds
  stutterDelay = false;
 
 
  if (moving) backColor--;
  else backColor++;
  if (backColor > 255) backColor = 255;
  else if (backColor < 0) backColor = 0;
  background(backColor);
 
  var positions = ctracker.getCurrentPosition();
  noFill();
  stroke(color(0,255,0));
  if (positions.length > 0 && DEBUG) {
    rect(map(positions[0][0], 0, 640, 0, width), map(positions[0][1], 0, 480, 0, height), 100, 100);
  }
 
  if (positions.length > 0 || mouseIsPressed) {
    moving = true;
    sideCounter++;
    if (!sideMoving) moveStartTime = millis();
  } else {
    moving = false;
  }
 
  push();
  translate(width/2, height/2);
  y = [
    int(height/2 * function_PennerEaseOutElastic(moveTime[0])),
    int(height/2 * function_PennerEaseOutElastic(moveTime[1])),
    int(height/2 * function_PennerEaseOutElastic(moveTime[2]))
  ];    
  showTime[1] = nf(int(time[1]+(30*function_PennerEaseOutElastic(moveTime[0])*noise(moveTime[0]))),2);
  showTime[0] = nf(int(time[0]+(30*function_PennerEaseOutElastic(moveTime[0]+0.0025)*noise(moveTime[0]+0.0025))),2);
  showTime[2] = nf(int(time[2]+(30*function_PennerEaseOutElastic(moveTime[0]+0.0025)*noise(moveTime[0]+0.0025))),2);
  if (!sideMoving) {
    if (moving) {
      moveTime[0] += 0.005;
      if (moveTime[0] >= 0.020) moveTime[1] += 0.005;
      if (moveTime[1] >= 0.040) moveTime[2] += 0.005;
      for (var i=0;i<moveTime.length;i++) if (moveTime[i] > 1) moveTime[i] = 1;
    } else if (!moving && (moveTime[0] > 0 || moveTime[1] > 0 || moveTime[2] > 0)) {
      moveTime[0] -= 0.0025;
      if (moveTime[0] <= 0.980) moveTime[1] -= 0.0025;
      if (moveTime[1] <= 0.960) moveTime[2] -= 0.0025;
      for (var j=0;j<moveTime.length;j++) if (moveTime[j] < 0) moveTime[j] = 0;
    } else {
      for (var k=0;k<moveTime.length;k++) moveTime[k] = 0;
 
      if (time[3] % 22 == 0) {
        var jitterRand = int(random(0,3));
        var change = int(random(-1,2));
        showTime[jitterRand] = nf(time[jitterRand] + change, 2);
        stutterDelay = true;
      }
    }
  }
  textFont('VT323');
  textSize(100);
  fill(255 - backColor);
  if (stutterDelay) fill((255 - backColor) + random(80, 100));
  noStroke();
  textAlign(CENTER, CENTER);
 
  var xShift = 0;
  if (moveTime[0] > 0.40) {
    showTime[1] = "00";
    var blink = (""+frameCount).substr(-2, 1);
    var blinkB = (""+frameCount).substr(-3, 1);
    if (4 < blink && blink < 7 && (blinkB == 4 || blinkB == 9)) showTime[1] = "++";
  }
  if (moveTime[0] == 1 && moving) {
    if (millis() - moveStartTime >= 5000 || sideCounter > 200) {
      if (!sideMoving) {
        if (!mouseIsPressed) currSideToX = width - int(map(positions[0][0], 0, 640, 0, width));
        else currSideToX = mouseX;
        console.log(currSideToX, width, mouseX);
      }
      sideMoving = true;
 
      xShift = int((width/2 - currSideToX) * function_PennerEaseOutElastic(sideMoveTime));
 
      sideMoveTime += 0.0025;
      if (sideMoveTime > 1) sideMoveTime = 1;
    }
  } else if (!moving && sideMoving) {
    xShift = int((width/2 - currSideToX) * function_PennerEaseOutElastic(sideMoveTime));
 
    sideMoveTime -= 0.0025;
    if (sideMoveTime < 0) sideMoving = false;
  }
 
  push();
  translate(xShift, y[0]);
  text(showTime[1], 0, 0);
  pop();
 
  if ((moveTime[0] <= 0.470 && moving) || (moveTime[0] <= 0.2 && !moving)) {
    push();
    translate(0, y[1]);
    text(":", -50, 0);
    pop();
 
    push();
    translate(0, y[1]);
    text(":", 50, 0);
    pop();
 
    push();
    translate(0, y[2]);
    text(showTime[0], -100, 0);
    pop();
 
    push();
    translate(0, y[2]);
    text(showTime[2], 100, 0);
    pop();
  }
 
  pop();
 
  //if (stutterDelay) delay(500);
}
 
function updateMils(S) {
  if (prevSec != S) {
    millisRolloverTime = millis();
  }
  prevSec = S;
  mils = floor(millis() - millisRolloverTime);
}
 
// From https://github.com/golanlevin/Pattern_Master converted from Java
function function_PennerEaseOutElastic(t) {
  if (t==0) return 0.0; 
  if (t==1) return 1.0;
  var  p = 0.3;
  var  s = p/4;
 
  return (pow(2, -10*t) * sin( (t-s)*(2*PI)/p ) + 1);
}

sapeck-Reading02

  1. I appreciate the beauty and effective complexity of a sunset. Every sunset is affected by the shapes, sizes, and positions of the clouds, the position of the sun in relation to the earth, and where you are on earth. A sunset is much closer to total randomness than total order: every factor changes sporadically.
  2. I have struggled with what Galanter labels The Problem of Meaning. Nearly all of my computer-based art is created with an image of the final product in my head. I find it much more difficult to find emotion in something that I have created than to create something "forced." Creating something aesthetically pleasing seems easy when the conceptual aspect is mostly ignored.

sapeck-AnimatedLoop


1920x1920 H.264 (MP4) video version (2.8 MB)

I began this piece by mapping my pixel art from my sketchbook into a spreadsheet. This gave me a grid of 0's and 1's to create my stick figure bitmaps. I gave each row the same Generalized Blackman Window easing function but with slightly different parameters based on Perlin noise. The differing parameters add the "lagging" effect to the animation. The Blackman Window function ramps up slowly, spikes toward the middle of the loop, and then eases back down. I feel that the animation is a little boring. A continuous scroll of five or so figures would be more interesting. The background gradient reminds me more of Nyan Cat than I want it to. A single-tone gradient for each row would be less eye-straining and just as appealing.

/* Sapeck    9/12/2017
"sapeck-AnimatedLoop"
Based on a template for creating a looping animation in Processing/Java by Prof. Golan Levin, January 2018
60-212                        Carnegie Mellon University
Copyright (C) 2018-present  Sapeck, Prof. Golan Levin
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License version 3 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*/
 
//===================================================
// Global variables. 
String  myNickname = "sapeck"; 
int     nFramesInLoop = 500;
int     nElapsedFrames;
boolean bRecording; 
 
//===================================================
void setup() {
  size (640, 640);
  noiseSeed(283092);
  colorMode(HSB, 100);
  bRecording = false;
  nElapsedFrames = 0;
}
//===================================================
void keyPressed() {
  if ((key == 'f') || (key == 'F')) {
    bRecording = true;
    nElapsedFrames = 0;
  }
}
 
//===================================================
void draw() {
 
  // Compute a percentage (0...1) representing where we are in the loop.
  float percentCompleteFraction = 0; 
  if (bRecording) {
    percentCompleteFraction = (float) nElapsedFrames / (float)nFramesInLoop;
  } else {
    percentCompleteFraction = (float) (frameCount % nFramesInLoop) / (float)nFramesInLoop;
  }
 
  // Render the design, based on that percentage. 
  renderMyDesign (percentCompleteFraction);
 
  // If we're recording the output, save the frame to a file. 
  if (bRecording) {
    saveFrame("frames"+width+"/" + myNickname + "_frame_" + nf(nElapsedFrames, 4) + ".png");
    nElapsedFrames++; 
    if (nElapsedFrames >= nFramesInLoop) {
      bRecording = false;
    }
  }
}
 
int DIMENSION = 20;
int colorFilled = color(0,0,100,255);
int LOOPS = 3;
PVector easeCurveBase = new PVector(0.1, 0);
int NOISESEED = 283092;
 
void renderMyDesign (float percent) {
  background(0);
  smooth();
 
  for (int y=0;y<DIMENSION;y++) {
    pushMatrix();
    boolean right = (y % 2 == 0);
    right = false;
    int moveX = 0;
    int startRow = 0;
    PVector thisEaseCurve = new PVector(easeCurveBase.x+0.3*noise(y*10)/1, easeCurveBase.y+noise(y*10)/1);
    if (y > (DIMENSION-1)/2) thisEaseCurve = new PVector(easeCurveBase.x-0.3*noise(y*10)/1, easeCurveBase.y-noise(y*10)/1);
    float thisPercent = percent;
    thisPercent = function_GeneralizedBlackmanWindow(percent,thisEaseCurve.x);
    if (right) {
      startRow = -1*(LOOPS-1)*width;
      moveX = int(thisPercent*(LOOPS-1)*width);
    } else {
      moveX = int(-1*thisPercent*(LOOPS-1)*width);
    }
    translate(moveX ,0);
    for (int loop=0;loop<LOOPS;loop++) {
      for (int x=0;x<DIMENSION;x++) {
        int thisBox = 0;
        if (loop == LOOPS-1 && right) thisBox = MAN1[y][x];
        else if (loop == LOOPS-1 && !right) thisBox = MAN2[y][x];
        else if (loop == 0 && right) thisBox = MAN2[y][x];
        else if (loop == 0 && !right) thisBox = MAN1[y][x];
        PVector thisLoc = new PVector(x*(width/DIMENSION)+(loop*width)+startRow, y*(height/DIMENSION));
        if (thisBox == 1) {
          fill(colorFilled);
          rect(thisLoc.x, thisLoc.y, width/DIMENSION, height/DIMENSION);
        } else {
          int colorX = x+y;
          if (colorX > DIMENSION) colorX -= DIMENSION;
          fill(color(map(colorX,0,DIMENSION,0,100),
                     100,
                     100));
          rect(thisLoc.x, thisLoc.y, width/DIMENSION, height/DIMENSION);
        }
      }
    }
    popMatrix();
  }
}
 
int[][] MAN1 = {
  {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0},
  {0,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}
};
int[][] MAN2 = {
  {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,1,1,0,0,1,1,1,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
  {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}
};
 
 
//===================================================
// Taken from https://github.com/golanlevin/Pattern_Master
float function_GeneralizedBlackmanWindow (float x, float a) {
  // http://en.wikipedia.org/wiki/Window_function
  float a0 = (1.0 - a)/2.0;
  float a1 = 0.5;
  float a2 = a / 2.0;
 
  float pix = PI*x;
  float y = a0 - a1*cos(2*pix) + a2*cos(4*pix);
  return y;
}

sapeck-Scope

My design is a simple yet slightly humorous attempt to animate an emoji. The head and eyes move in a sinusoidal manner, while the tongue stays stationary but lengthens in accordance with the frame number.

sapeck-praxinoscope-output (PDF download)

/* Sapeck    9/12/2017
"sapeck-Scope"
60-212                        Carnegie Mellon University
Copyright (C) 2018-present  Sapeck
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License version 3 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*/
void drawArtFrame (int whichFrame) { 
  pushMatrix();
  // move the head up and down sinusoidally
  translate(0, -10+30*sin(map(whichFrame, 0, 10, 0, 6)));
 
  // draw the head
  fill(0);
  ellipse(0, 0, 50, 50);
 
  // draw the eye sockets
  fill(255);
  ellipse(-10, -10, 20, 20);
  ellipse(10, -10, 20, 20);
 
  // draw the eyes
  fill(0);
  int eyeSize = 6+int(6*sin(map(whichFrame, 0, 10, 6, 2)));
  ellipse(-10, -10, eyeSize, eyeSize);
  ellipse(10, -10, eyeSize, eyeSize);
 
  // draw the tongue
  fill(color(255,0,0,255));
  rect(-10, 10, 20, 10+2*whichFrame, 7);
 
  popMatrix();
}