dinkolas-book

Bioinvasive Dingus

The U.S. judicial branch, bioinvasion, war, and sociology collide with the vocabularies of Lewis Carroll and Steve Brule in their first and last ever mash-up.
Here is a .zip containing 25 iterations of 10-page chapters:

https://drive.google.com/file/d/1PfSEv24RcGyA8eCPXGnYXw5h3YIFgONi/view?usp=sharing

The text portion of this project was generated using a combination of Markov chains. First, the text body was generated from a corpus of a series of academic papers on serious subjects ranging from Supreme Court decisions to changing migration habits. These papers were selected from the MICUSP database of student papers. The Markov chain had an n-gram length of 4, and was word-based.

Next, random nouns were selected from the text to be replaced with other generated words. The replacement words were generated letter by letter with an n-gram length of 2. They were generated from Lewis Carroll's Jabberwocky and transcripts of Check It Out! with Dr. Steve Brule. These words in isolation can be read and heard here by clicking to generate a new word: https://editor.p5js.org/dinkolas/full/ryIcv99aX

The resultant text is a mishmash of technical jargon, actual nonsensical words, and serious problems with the world that are obscured by a dense dialect. Finally, images were generated by rotating and superimposing images from Yale's face database, and overlaying selected words from the "glossary" of generated words. These images are strung through the text, breaking it up, making it even more difficult to read line to line. Visually and textually, the generated nonsensical words make it almost impossible to parse the discussion of national and global crises.

Here's the code for the text:

// Global state for the text-generation sketch.
var dingus;      // raw dingus corpus lines (assigned in preload; was an
                 // undeclared implicit global in the original)
var dingusGen;   // character-level Markov generator (Markov instance)
var input;       // body-text corpus lines (assigned in preload)
var markBody;    // word-level RiMarkov generator
var final;       // unused at top level; genText declares its own local

// 25 book versions, each accumulating 10 generated pages in setup().
var bigJSON = {
  versions: (function () {
    var vs = [];
    for (var i = 0; i < 25; i++) {
      vs.push({ pages: [] });
    }
    return vs;
  })()
};
 
// p5 preload: fetch both text corpora before setup() runs.
function preload() {
  // Source text for invented words (Jabberwocky + Steve Brule transcripts).
  dingus = loadStrings('dingus.txt');
  // Academic-paper corpus for the body text.
  input = loadStrings('input1.txt');
}
 
// p5 setup: build both Markov models, generate every page of every
// version, and write the whole book out as one JSON file.
function setup() {
  createCanvas(500, 400);
  textSize(12);
  textAlign(LEFT);

  // Character-level chain (n-gram length 2) over the dingus corpus;
  // generated words terminate on a space, capped at 100 tokens.
  dingusGen = new Markov(2, dingus.join(" ").split(""), " ", 100);

  // Word-level chain (n-gram length 4) over the academic-paper corpus.
  markBody = new RiMarkov(4);
  markBody.loadText(input.join(' '));

  // 25 versions x 10 pages, generated in order.
  for (var version = 0; version < 25; version++) {
    for (var page = 0; page < 10; page++) {
      bigJSON.versions[version].pages[page] = genText(version, page);
    }
  }
  saveJSON(bigJSON, "final1.json");
}
 
/**
 * Generate one page of the book: 20 Markov-generated sentences with
 * roughly half of the nouns swapped for invented "dingus" words.
 * Draws the text on the canvas as a side effect.
 *
 * @param version  version index (unused in the body; kept for callers)
 * @param page     page index (unused in the body; kept for callers)
 * @returns {Object} { glossary: [...capitalized invented words], text: "..." }
 */
function genText(version, page) {
  background(255);
  var tokens = RiTa.tokenize(markBody.generateSentences(20).join(' '));
  var glossary = [];

  for (var i = 0; i < tokens.length; i++) {
    var original = tokens[i];
    // Replace only nouns, and only about half of them.
    if (!RiTa.isNoun(original) || random(1) >= 0.5) {
      continue;
    }

    // Re-roll until the invented word is no shorter than the noun it
    // replaces and at most 5 characters longer.
    var replacement;
    do {
      replacement = dingusGen.generate().join("");
    } while (replacement.length < original.length ||
             replacement.length > original.length + 5);

    // Mirror the original word's leading capitalization.
    if (original.charAt(0) == original.charAt(0).toUpperCase()) {
      replacement = replacement.charAt(0).toUpperCase() + replacement.slice(1);
    }
    // Occasionally pluralize the invented word...
    if (random(1) < 0.2) {
      replacement = RiTa.pluralize(replacement);
    }
    // ...and occasionally promote it (capitalized) into the glossary.
    if (random(1) < 0.2) {
      glossary.push(replacement.charAt(0).toUpperCase() + replacement.slice(1));
    }

    tokens[i] = replacement;
  }

  var thisJSON = {};
  thisJSON.glossary = glossary;
  thisJSON.text = RiTa.untokenize(tokens);
  text(RiTa.untokenize(tokens), 50, 50, 400, 400);
  return thisJSON;
}
 
// Frequency-table Markov chain over a fixed token alphabet (used here
// character-by-character). Counts are stored in one flat array indexed
// by treating each n-gram as a base-`tokens.length` number.
// NOTE(review): `pow`, `random`, and `print` are p5.js globals.
//
//   n      - gram length (order of the chain)
//   input  - token array forming the corpus (here: single characters)
//   end    - terminator token (here: " "); generation stops on it
//   maxLen - cap on re-seed attempts and on generated length
function Markov(n, input, end, maxLen) {
	this.n = n;
  this.ngrams = [];   // flat count table, size tokens.length^n
  this.tokens = [];   // alphabet: distinct tokens seen in the corpus
  this.end = end;
  this.maxLen = maxLen;
  this.N = 0;         // total number of n-grams counted in the corpus
  // Encode an n-gram (array of n tokens) into its flat-table index.
  this.indexFromGram = function(gram) {
    var index = 0;
    for (var i = 0; i < n; i++)
    {
      index += this.tokens.indexOf(gram[i]) * pow(this.tokens.length, n - i - 1);
    }
    return index;
  }
  // Inverse of indexFromGram: decode a flat index back into its n-gram.
  this.indexToGram = function(index) {
    var gram = [];
    for (var i = 0; i < n; i++)
    {
      gram.unshift(this.tokens[(Math.floor(index/(pow(this.tokens.length, i)))) % (this.tokens.length)]);
    }
    return gram;
  }
 
  // Build the alphabet of distinct tokens.
  for (var i = 0; i < input.length; i++)
  {
    if (!(this.tokens.includes(input[i])))
    {
      this.tokens.push(input[i]);
    }
  }
 
  // Zero-initialize one counter per possible n-gram.
  for (i = 0; i < pow(this.tokens.length, n); i++)
  {
    this.ngrams.push(0);
  }
 
  // Count every sliding-window n-gram in the corpus.
  var gram = [];
  for (i = 0; i < input.length - n + 1; i++)
  {
    gram = []
    for (var j = 0; j < n; j++)
    {
      gram.push(input[i + j]);
    }
    this.ngrams[this.indexFromGram(gram)] ++;
  }
 
  // Total gram count; used to draw a frequency-weighted random seed.
  for (i = 0; i < this.ngrams.length; i++)
  {
    this.N += this.ngrams[i];
  }
 
 
  // Draw a random n-gram, weighted by its corpus frequency.
  // NOTE(review): the local `n` below shadows the constructor's gram
  // length `n`; it is only a running sum, so this is safe but easy to
  // misread.
  this.seed = function() {
    var randInd = Math.floor(random(this.N));
    var n = 0;
    for (var i = 0; i < this.ngrams.length; i++) { n += this.ngrams[i]; if (n > randInd) 
      {
        return this.indexToGram(i);
      }
    }
    print("seed is fucked");
    return [];
  }
 
  // Given the last n-1 tokens, sample the next token from the
  // frequency-weighted distribution of n-grams sharing that prefix.
  this.nextToken = function(gram) {
    // Padding with tokens[0] makes indexFromGram yield the base index
    // of the contiguous run of n-grams beginning with this prefix.
    gram.push(this.tokens[0]);
    var index0 = this.indexFromGram(gram);
    var N = 0;
    for (var i = 0; i < this.tokens.length; i++)
    {
      N += this.ngrams[index0 + i];
    }
    var n = 0;
    var randInd = Math.floor(random(N));
    for (i = 0; i < this.tokens.length; i++) { n += this.ngrams[index0 + i]; if (n > randInd) return this.tokens[i];
    }
    // Reached only when the prefix never occurred in the corpus (N == 0).
    print("nextToken is fucked");
    print(gram);
    return 0;
  }
 
  // Generate one "word": re-seed until the seed contains no terminator,
  // extend until the terminator is emitted (or maxLen tokens), then
  // return the sequence without its final token.
  this.generate = function() {
    var out = this.seed();
    //print("out", out);
    var i = 0;
    while (out.includes(this.end) && i < this.maxLen)
    {
    	out = this.seed();
      i++
    }
    i = 0;
    while (out[out.length - 1] != this.end && i < this.maxLen)
    {
      out.push(this.nextToken(out.slice(out.length - n + 1, out.length)));
      i++;
    }
    // NOTE(review): this drops the last token even when the loop above
    // stopped at maxLen rather than at the terminator, so one real
    // token is lost in that (rare) case — confirm intended.
    return out.splice(0,out.length-1);
  }
}

And the code for the images:

// Global state for the image-generation sketch.
var book;        // parsed book JSON (assigned in preload)
var images = []; // source face images (assigned in preload)
var img;         // (unused)
var offset;      // (unused at top level; genImage uses its own local)
var c;           // the p5 canvas handle, needed by saveCanvas
var wordsSoFar;  // list of previously generated words (assigned in preload)
 
// p5 preload: load every 10th face image (001.png, 011.png, ... 221.png),
// the generated book JSON, and the list of words generated so far.
function preload() {
  for (var i = 1; i <= 230; i += 10) {
    // Zero-pad the index to three digits (e.g. 1 -> "001.png") instead
    // of the original three-way if/else branching.
    var filename = String(i).padStart(3, "0") + ".png";
    images.push(loadImage("images/" + filename));
  }
  book = loadJSON('bigBoyFinal1.json');
  wordsSoFar = loadStrings('wordsSoFar.txt');
}
 
// p5 setup: walk every glossary word in every page of every version of
// the book and render one composite image per word.
function setup() {
  c = createCanvas(300, 300);
  textAlign(CENTER);
  textSize(40);
  background(200);
  var count = 0; // (unused)
  print(book.versions[0].pages[0].glossary);
  var versions = book.versions;
  for (var v = 0; v < versions.length; v++) {
    var pages = versions[v].pages;
    for (var p = 0; p < pages.length; p++) {
      var glossary = pages[p].glossary;
      for (var w = 0; w < glossary.length; w++) {
        genImage(glossary[w]);
      }
    }
  }
}
 
// Render one composite image for `word` and save it as a .jpg.
// Layer count scales with word length; each layer is a randomly rotated,
// half-transparent face image drawn over the canvas.
function genImage(word) {
  background(255);
  blendMode(BLEND);
  var stretch = 1; // vertical stretch factor, grows 0.1 per layer
  for (var i = 0; i < word.length / 2; i++) {
    push();
    // Rotate about the canvas center by a random amount.
    // NOTE(review): angleMode defaults to RADIANS, so random(360) spans
    // ~57 full turns rather than 0-360 degrees; the result is still an
    // effectively arbitrary rotation — confirm intended.
    translate(150, 150);
    rotate(random(360));
    translate(-150, -150);
    tint(255, 127);
    image(images[Math.floor(random(images.length))], 0, 0, width, height * stretch);
    // NOTE(review): these blendMode calls precede pop(), which restores
    // the saved drawing state, so they may never take effect — confirm.
    blendMode(MULTIPLY);
    if (i % 2 == 0) blendMode(ADD);
    stretch += 0.1;
    pop();
  }
  // Save under a monotonically increasing counter. The original used
  // Math.floor(random(50,100)) as the filename, which has only 50
  // possible values, so later saves silently overwrote earlier ones.
  genImage.counter = (genImage.counter || 0) + 1;
  saveCanvas(c, "img_" + genImage.counter, "jpg");
}

And the code for the PDF generation in basil.js:

#includepath "~/Documents/;%USERPROFILE%Documents";
 
#include "basiljs/bundle/basil.js";
 
// to run this example 
 
 
// basil.js entry point: lay out all 25 versions of the book in InDesign,
// one document page per JSON page, bolding glossary words in the body
// text and scattering captioned face circles down each page.
// (ExtendScript is ES3: var-only, no Array.prototype.indexOf.)
function draw() 
{
    console.log("hello");
 
	// ES3-safe value-membership test for arrays.
	function containsWord(list, w)
	{
		for (var k = 0; k < list.length; k++)
		{
			if (list[k] === w) { return true; }
		}
		return false;
	}

	var dpi = 72; // page coordinates below are inches * dpi
	b.textSize(9);
	var json = b.JSON.decode(b.loadString("bigBoyFinal1.json"));
	b.clear(b.doc());
	for (var v = 0; v < json.versions.length; v++)
	{
		b.page(1);
		var wSum = 0; // running circle count; drives the sine wiggle
		b.clear(b.doc());
		for (var p = 0; p < json.versions[v].pages.length; p++)
		{
			b.noStroke();
			b.fill(0);
			b.textFont('Gadugi', 'Regular');
			var currentText = b.text(json.versions[v].pages[p].text, 1*dpi, 1*dpi, 4*dpi, 7*dpi);

			// Bold every body-text word that appears in this page's
			// glossary. The original `for (var word in b.words(...))`
			// iterated collection keys, and `word in glossary` tested
			// array *indices* via the `in` operator, so glossary words
			// were never matched.
			// NOTE(review): assumes each Word object's `contents` holds
			// its text, and that glossary entries match the in-text
			// capitalization — confirm against generated JSON.
			var glossary = json.versions[v].pages[p].glossary;
			var bodyWords = b.words(currentText);
			for (var i = 0; i < bodyWords.length; i++)
			{
				if (containsWord(glossary, bodyWords[i].contents))
				{
					b.typo(bodyWords[i], 'appliedFont', 'Gadugi\tBold');
				}
			}
 
			for (var w = 0; w < json.versions[v].pages[p].glossary.length; w++)
			{
				wSum++;
				// Circles snake down the page along a sine curve.
				var x = 3*dpi + 2*dpi*b.sin(wSum/3);
				var y = b.map(w, 0, json.versions[v].pages[p].glossary.length, 1.2*dpi, 8.2*dpi);
				b.noFill();
				// Slightly larger invisible ellipse supplies the text-wrap contour.
				var wrapper = b.ellipse(x, y, 0.9*dpi+0.2*dpi*b.sin(wSum/4), 0.9*dpi+0.2*dpi*b.sin(wSum/4));
				wrapper.textWrapPreferences.textWrapMode = TextWrapModes.CONTOUR;
				var circle = b.ellipse(x, y, 0.75*dpi+0.2*dpi*b.sin(wSum/4), 0.75*dpi+0.2*dpi*b.sin(wSum/4));
				try {
					var imgCircle = b.image("FaceImgBgs/" + Math.floor(b.random(73)) + ".jpg", circle);
				}
				catch(error) {
					// Missing image file: fall back to a flat gray disc.
					b.fill(b.random(10,70));
					b.ellipse(x, y, 0.75*dpi+0.2*dpi*b.sin(wSum/4), 0.75*dpi+0.2*dpi*b.sin(wSum/4));
				}
				b.fill(255,255,255);
				b.textAlign(Justification.CENTER_ALIGN);
				var myText = b.text(json.versions[v].pages[p].glossary[w],x-1*dpi,y-0.04*dpi,2*dpi,0.5*dpi);
				myText.textFramePreferences.ignoreWrap = true;
				b.textAlign(Justification.LEFT_ALIGN);
			}
			// The first version creates the pages; later versions reuse them.
			if (v == 0) 
			{
				if (p < json.versions[v].pages.length - 1){b.addPage();}
			}
			else
			{
				if (p < json.versions[v].pages.length - 1){b.page(p+2);}
			}
		}
		// Uncomment to export one zero-padded PDF per version:
		//if (v < 10){ b.savePDF("0" + v + "_dinkolas.pdf", false);}
		//else { b.savePDF(v + "_dinkolas.pdf", false);}
 
	}
}
 
 
 
// basil.js kickoff: runs draw() inside InDesign.
b.go();