I Am Sitting in a Room - Alvin Lucier
license: mit

This is an experiment in manipulating audio sample data directly, using the Web Audio API. It's also a kind of homage to Alvin Lucier's I Am Sitting in a Room. The original was created by repeatedly re-recording an audio track while it played back into the room in which it was first recorded. With each iteration the room's resonant frequencies became more and more prominent, until the result no longer sounded anything like the original recording.

I cropped his 'clean' recording for better performance in the browser, and preselected some delay offsets to create a similar effect. Instead of responding to the room's resonant frequencies, this version mixes four delayed copies of the track back into itself every time it repeats, as sketched below.
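
Roughly, each pass does the following (a minimal sketch of the step() function in the code below; samples stands for the track's decoded channel data, and the delays are measured in samples):

function applyOnePass(samples, delays) {
  // Take a snapshot so every delayed copy reads the pre-pass signal
  var snapshot = samples.slice();
  for (var n = 0; n < delays.length; n++) {
    for (var i = 0; i < samples.length; i++) {
      // Wrap around the end of the buffer so the copies loop with the track
      samples[i] += snapshot[(i - delays[n] + samples.length) % samples.length] / 10;
    }
  }
}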

You can skip ahead by clicking the 'Apply Effect' button, but beware, it can get very loud!

Also see: Phase Shifting

<!DOCTYPE html>
<html>
<head>
<style>
button {margin-right: 10px}
input {margin-left: 10px}
path {fill: none; stroke: black; stroke-width: .5;}
.position {fill: red; fill-opacity: .5; visibility: hidden;}
.zoom {fill: none; pointer-events: all;}
</style>
<script src="//d3js.org/d3.v4.min.js"></script>
</head>
<body>
<div>
<button id="apply-delay" onclick="step()">Apply Effect</button>
<button id="reset" onclick="reset()">Reset</button>
<button id="start-stop" onclick="playing ? stopSound() : playSound()">Start</button>
<input type="checkbox" onclick="stepping = !stepping" checked/>Step when Looping
</div>
<svg></svg>
<script>
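/*
  Two linked views of the waveform: a zoomable "focus" view on top and a
  brushable "context" overview below it. The audio side decodes a WAV into
  an AudioBuffer, rewrites its channel data in place on every step, and
  re-normalizes so the samples stay within [-1, 1].
*/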
var width = Math.max(940, innerWidth - 20),
    height = 470,
    focusHeight = 200,
    contextHeight = 50,
    sampRateAdj,   // how many samples are averaged into each drawn point
    origBuffer;    // untouched copy of the decoded samples, used by reset()
var delayVals = [401, 1201, 3101, 5001]; // delay offsets in samples (~9-113 ms at a 44.1 kHz sample rate)
var audioCtx = new (window.AudioContext || window.webkitAudioContext)(),
    beatData, source, playing, stepping = true;
var svg = d3.select("svg")
    .attr("width", width)
    .attr("height", height);
var x1 = d3.scaleLinear().range([0, width]),
    x2 = d3.scaleLinear().range([0, width]),
    y1 = d3.scaleLinear().range([focusHeight, 0]),
    y2 = d3.scaleLinear().range([contextHeight, 0]);
var normalizeScale = d3.scaleLinear().range([-1, 1]);
var brush = d3.brushX()
    .extent([[0, 0], [width, contextHeight]])
    .on("brush end", brushed);
var zoom = d3.zoom()
    .scaleExtent([1, Infinity])
    .translateExtent([[0, 0], [width, focusHeight]])
    .extent([[0, 0], [width, focusHeight]])
    .on("zoom", zoomed);
var wave1 = d3.line()
    .curve(d3.curveMonotoneX)
    .x(function(d, i) {return x1(i)})
    .y(function(d) {return y1(d)});
var wave2 = d3.line()
    .curve(d3.curveMonotoneX)
    .x(function(d, i) {return x2(i)})
    .y(function(d) {return y2(d)});
var focus = svg.append("g")
    .attr("id", "focus");
var context = svg.append("g")
    .attr("id", "context")
    .attr("transform", "translate(0," + (focusHeight + 30) + ")");
context.append("text")
    .text("Original wave:")
    .attr("transform", "translate(0,20)");
var waveShape1 = focus.append("path")
    .datum([])
    .attr("d", wave1)
    .attr("transform", "translate(0,30)");
var waveShape2 = context.append("path")
    .attr("transform", "translate(0,30)");
var position = context.append("rect") // playback cursor on the context view
    .attr("class", "position")
    .attr("y", 30)
    .attr("width", 10)
    .attr("height", contextHeight);
var brushRect = context.append("g")
    .attr("class", "brush")
    .attr("transform", "translate(0,30)")
    .call(brush)
    .call(brush.move, x1.range());
var zoomRect = svg.append("rect")
    .attr("class", "zoom")
    .attr("width", width)
    .attr("height", focusHeight)
    .attr("transform", "translate(0,30)")
    .call(zoom);
importAudio("//raw.githubusercontent.com/alexmacy/Audio-clips/master/sitting.wav")
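// Keep the two views in sync: brushing the context view re-scales the
// focus view, and zooming or panning the focus view moves the brush.
// Each handler bails out when the event originated from the other control,
// to avoid an infinite feedback loop.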
function brushed() {
  if (d3.event.sourceEvent && d3.event.sourceEvent.type === "zoom") return;
  var s = d3.event.selection || x2.range();
  x1.domain(s.map(x2.invert, x2));
  waveShape1.attr("d", wave1);
  svg.select(".zoom").call(zoom.transform, d3.zoomIdentity
      .scale(width / (s[1] - s[0]))
      .translate(-s[0], 0));
}
function zoomed() {
  if (d3.event.sourceEvent && d3.event.sourceEvent.type === "brush") return;
  var t = d3.event.transform;
  x1.domain(t.rescaleX(x2).domain());
  waveShape1.attr("d", wave1);
  context.select(".brush").call(brush.move, x1.range().map(t.invertX, t));
}
function importAudio(url) {
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  request.onload = function() {
    audioCtx.decodeAudioData(request.response, function(buffer) {
      loadAudio(buffer);
    },
    function() {alert("Error decoding audio data")});
  };
  request.send();
  function loadAudio(buffer) {
    beatData = buffer;
    // Keep a pristine copy of the samples so reset() can restore them
    origBuffer = beatData.getChannelData(0).slice();
    normalize();
    // Average the samples down to roughly two drawn points per pixel
    sampRateAdj = Math.floor(beatData.length / (width * 2));
    var waveData = drawWithAvgs(0);
    x1.domain([0, waveData.length]);
    y1.domain(d3.extent(beatData.getChannelData(0)));
    x2.domain(x1.domain());
    y2.domain(y1.domain());
    waveShape1.datum(waveData).attr("d", wave1);
    waveShape2.datum(waveData).attr("d", wave2);
  }
}
function playSound() {
  d3.select("#start-stop").text("Stop");
  playing = true;
  source = audioCtx.createBufferSource();
  source.buffer = beatData;
  source.connect(audioCtx.destination);
  source.start();
  // Sweep the playback cursor across the context view, then loop
  position.style("visibility", "visible")
    .transition().duration(0)
      .attr("x", 0)
    .transition().duration(beatData.duration * 1000).ease(d3.easeLinear)
      .attr("x", width)
      .on("end", function() {
        if (playing) {
          playSound();
          if (stepping) step();
        }
      });
}
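// One "re-record" pass: mix a delayed copy of the current track back into
// itself (at 1/10 amplitude) for each offset in delayVals, then re-normalize.
// The index wraps around the end of the buffer so the copies loop seamlessly.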
function step() {
  var channel = beatData.getChannelData(0);
  var tempBuffer = channel.slice(); // snapshot of the pre-pass signal
  for (var n = 0; n < delayVals.length; n++) {
    for (var i = 0; i < beatData.length; i++) {
      channel[i] += tempBuffer[(i - delayVals[n] + beatData.length) % beatData.length] / 10;
    }
  }
  normalize();
  y1.domain(d3.extent(channel));
  waveShape1.datum(drawWithAvgs(0)).attr("d", wave1);
}
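// Rescale the channel so its samples span exactly [-1, 1]; without this,
// each pass would push the summed signal further past full scale.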
function normalize() {
  var channel = beatData.getChannelData(0);
  normalizeScale.domain(d3.extent(channel));
  for (var i = 0; i < beatData.length; i++) {
    channel[i] = normalizeScale(channel[i]);
  }
}
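// Downsample a channel for drawing: one point per sampRateAdj samples,
// each point being the mean of its slice.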
function drawWithAvgs(channel) {
  var avgs = [];
  for (var i = 0; i < beatData.length - sampRateAdj; i += sampRateAdj) {
    avgs.push(d3.mean(beatData.getChannelData(channel).slice(i, i + sampRateAdj)));
  }
  return avgs;
}
function stopSound() {
  d3.select("#start-stop").text("Start");
  position.interrupt().style("visibility", "hidden");
  playing = false;
  if (source) {source.stop();}
}
function reset() {
  // Restore the pristine samples captured at load time
  beatData.getChannelData(0).set(origBuffer);
  y1.domain(d3.extent(beatData.getChannelData(0)));
  waveShape1.datum(drawWithAvgs(0)).attr("d", wave1);
}
</script>
</body>
</html>