Turn audio into a shareable video. Forked from nypublicradio/audiogram.

index.js 4.7KB

var path = require("path"),
    queue = require("d3").queue,
    mkdirp = require("mkdirp"),
    rimraf = require("rimraf"),
    serverSettings = require("../settings/"),
    transports = require("../lib/transports/"),
    logger = require("../lib/logger/"),
    probe = require("./probe.js"),
    getWaveform = require("./waveform.js"),
    initializeCanvas = require("./initialize-canvas.js"),
    drawFrames = require("./draw-frames.js"),
    combineFrames = require("./combine-frames.js"),
    trimAudio = require("./trim.js");
function Audiogram(settings) {

  // Unique audiogram ID
  this.id = settings.id;

  this.settings = settings;

  // File locations to use
  this.dir = path.join(serverSettings.workingDirectory, this.id);
  this.audioPath = path.join(this.dir, "audio");
  this.videoPath = path.join(this.dir, "video.mp4");
  this.frameDir = path.join(this.dir, "frames");

  return this;

}
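
// The settings object is expected to carry at least the fields referenced in
// this file: id, framesPerSecond, and samplesPerFrame, plus the optional
// maxDuration, start, and end; getWaveform() later attaches settings.waveform
// for the renderer.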
// Probe an audio file for its duration and # of channels, compute the number of frames required
Audiogram.prototype.probe = function(cb) {

  var self = this;

  this.status("probing");

  probe(this.audioPath, function(err, data){

    if (err) {
      return cb(err);
    }

    if (self.settings.maxDuration && self.settings.maxDuration < data.duration) {
      return cb("Exceeds max duration of " + self.settings.maxDuration + "s");
    }

    self.set("numFrames", self.numFrames = Math.floor(data.duration * self.settings.framesPerSecond));
    self.channels = data.channels;

    cb(null);

  });

};
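
// For example, 12.4 seconds of audio rendered at 20 frames per second
// yields Math.floor(12.4 * 20) = 248 frames.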
// Get the waveform data from the audio file, split into frames
Audiogram.prototype.getWaveform = function(cb) {

  var self = this;

  this.status("waveform");

  getWaveform(this.audioPath, {
    numFrames: this.numFrames,
    samplesPerFrame: this.settings.samplesPerFrame
  }, function(err, waveform){
    return cb(err, self.settings.waveform = waveform);
  });

};
// Trim the audio by the start and end time specified
Audiogram.prototype.trimAudio = function(start, end, cb) {

  var self = this;

  this.status("trim");

  // FFmpeg needs an extension to sniff
  var trimmedPath = this.audioPath + "-trimmed.mp3";

  trimAudio({
    origin: this.audioPath,
    destination: trimmedPath,
    startTime: start,
    endTime: end
  }, function(err){
    if (err) {
      return cb(err);
    }
    self.audioPath = trimmedPath;
    return cb(null);
  });

};
// Initialize the canvas and draw all the frames
Audiogram.prototype.drawFrames = function(cb) {

  var self = this;

  this.status("renderer");

  initializeCanvas(this.settings, function(err, renderer){

    if (err) {
      return cb(err);
    }

    self.status("frames");

    drawFrames(renderer, {
      numFrames: self.numFrames,
      frameDir: self.frameDir,
      tick: function() {
        transports.incrementField(self.id, "framesComplete");
      }
    }, cb);

  });

};
// Combine the frames and audio into the final video with FFmpeg
Audiogram.prototype.combineFrames = function(cb) {

  this.status("combine");

  combineFrames({
    framePath: path.join(this.frameDir, "%06d.png"),
    audioPath: this.audioPath,
    videoPath: this.videoPath,
    framesPerSecond: this.settings.framesPerSecond
  }, cb);

};
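
// The "%06d.png" pattern tells FFmpeg to read zero-padded, sequentially
// numbered PNGs from frameDir (000000.png, 000001.png, ...), so the frames
// written by draw-frames.js must follow the same naming scheme.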
// Master render function, queue up steps in order
Audiogram.prototype.render = function(cb) {

  var self = this,
      q = queue(1);

  this.status("audio-download");

  // Set up tmp directory
  q.defer(mkdirp, this.frameDir);

  // Download the stored audio file
  q.defer(transports.downloadAudio, "audio/" + this.id, this.audioPath);

  // If the audio needs to be clipped, clip it first and update the path
  if (this.settings.start || this.settings.end) {
    q.defer(this.trimAudio.bind(this), this.settings.start || 0, this.settings.end);
  }

  // Get the audio's duration for computing number of frames
  q.defer(this.probe.bind(this));

  // Get the audio waveform data
  q.defer(this.getWaveform.bind(this));

  // Draw all the frames
  q.defer(this.drawFrames.bind(this));

  // Combine audio and frames together with ffmpeg
  q.defer(this.combineFrames.bind(this));

  // Upload video to S3 or move to local storage
  q.defer(transports.uploadVideo, this.videoPath, "video/" + this.id + ".mp4");

  // Delete working directory
  q.defer(rimraf, this.dir);

  // Final callback, results in a URL where the finished video is accessible
  q.await(function(err){
    if (!err) {
      self.set("url", transports.getURL(self.id));
    }
    return cb(err);
  });

  return this;

};
Audiogram.prototype.set = function(field, value) {
  logger.debug(field + "=" + value);
  transports.setField(this.id, field, value);
  return this;
};

// Convenience method for .set("status")
Audiogram.prototype.status = function(value) {
  return this.set("status", value);
};

module.exports = Audiogram;
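
A minimal usage sketch, assuming the source audio has already been stored at "audio/<id>" via the configured transport. The job ID and numeric values below are illustrative, and any theme fields required by initialize-canvas.js are omitted:

var Audiogram = require("./index.js"); // this file

var audiogram = new Audiogram({
  id: "example-job",        // illustrative job ID
  framesPerSecond: 20,      // illustrative values; a real job takes these
  samplesPerFrame: 128,     // from the server's settings and theme
  maxDuration: 300,
  start: 0,
  end: 30
});

audiogram.render(function(err){
  if (err) {
    return console.error(err);
  }
  // On success, render() has set the "url" field via transports,
  // pointing at the finished video.
  console.log("Finished rendering " + audiogram.id);
});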