Turn audio into a shareable video. Forked from nypublicradio/audiogram.

index.js 4.7KB

var path = require("path"),
    queue = require("d3").queue,
    mkdirp = require("mkdirp"),
    rimraf = require("rimraf"),
    serverSettings = require("../settings/"),
    transports = require("../lib/transports/"),
    logger = require("../lib/logger/"),
    getDuration = require("./duration.js"),
    getWaveform = require("./waveform.js"),
    initializeCanvas = require("./initialize-canvas.js"),
    drawFrames = require("./draw-frames.js"),
    combineFrames = require("./combine-frames.js"),
    trimAudio = require("./trim.js");

function Audiogram(settings) {

  // Unique audiogram ID
  this.id = settings.id;

  this.settings = settings;

  // File locations to use
  this.dir = path.join(serverSettings.workingDirectory, this.id);
  this.audioPath = path.join(this.dir, "audio");
  this.videoPath = path.join(this.dir, "video.mp4");
  this.frameDir = path.join(this.dir, "frames");

  return this;

}

// Probe an audio file for its duration, compute the number of frames required
Audiogram.prototype.getDuration = function(cb) {

  var self = this;

  this.status("duration");

  getDuration(this.audioPath, function(err, duration){

    if (err) {
      return cb(err);
    }

    if (self.settings.maxDuration && self.settings.maxDuration < duration) {
      return cb("Exceeds max duration of " + self.settings.maxDuration + "s");
    }
    self.set("numFrames", self.numFrames = Math.floor(duration * self.settings.framesPerSecond));

    cb(null);

  });

};

// Get the waveform data from the audio file, split into frames
Audiogram.prototype.getWaveform = function(cb) {

  var self = this;

  this.status("waveform");

  getWaveform(this.audioPath, {
    numFrames: this.numFrames,
    samplesPerFrame: this.settings.samplesPerFrame
  }, function(err, waveform){
    return cb(err, self.settings.waveform = waveform);
  });

};

// Trim the audio by the start and end time specified
Audiogram.prototype.trimAudio = function(start, end, cb) {

  var self = this;

  this.status("trim");

  // FFmpeg needs an extension to sniff
  var trimmedPath = this.audioPath + "-trimmed.mp3";

  trimAudio({
    origin: this.audioPath,
    destination: trimmedPath,
    startTime: start,
    endTime: end
  }, function(err){

    if (err) {
      return cb(err);
    }

    self.audioPath = trimmedPath;

    return cb(null);

  });

};

// Initialize the canvas and draw all the frames
Audiogram.prototype.drawFrames = function(cb) {

  var self = this;

  this.status("renderer");

  initializeCanvas(this.settings, function(err, renderer){

    if (err) {
      return cb(err);
    }

    self.status("frames");

    drawFrames(renderer, {
      numFrames: self.numFrames,
      frameDir: self.frameDir,
      tick: function() {
        transports.incrementField(self.id, "framesComplete");
      }
    }, cb);

  });

};

// Combine the frames and audio into the final video with FFmpeg
Audiogram.prototype.combineFrames = function(cb) {

  this.status("combine");

  combineFrames({
    framePath: path.join(this.frameDir, "%06d.png"),
    audioPath: this.audioPath,
    videoPath: this.videoPath,
    framesPerSecond: this.settings.framesPerSecond
  }, cb);

};
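// For reference, the combination step is conceptually equivalent to an FFmpeg
// invocation along these lines. This is an illustrative sketch only; the exact
// arguments live in combine-frames.js and may differ, and the file names here
// are placeholders:
//
//   ffmpeg -framerate 20 -i frames/%06d.png -i audio.mp3 \
//     -c:v libx264 -pix_fmt yuv420p -shortest video.mp4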
// Master render function, queue up steps in order
Audiogram.prototype.render = function(cb) {

  var self = this,
      q = queue(1);

  this.status("audio-download");

  // Set up tmp directory
  q.defer(mkdirp, this.frameDir);

  // Download the stored audio file
  q.defer(transports.downloadAudio, "audio/" + this.id, this.audioPath);

  // If the audio needs to be clipped, clip it first and update the path
  if (this.settings.start || this.settings.end) {
    q.defer(this.trimAudio.bind(this), this.settings.start || 0, this.settings.end);
  }

  // Get the audio's duration for computing number of frames
  q.defer(this.getDuration.bind(this));

  // Get the audio waveform data
  q.defer(this.getWaveform.bind(this));

  // Draw all the frames
  q.defer(this.drawFrames.bind(this));

  // Combine audio and frames together with ffmpeg
  q.defer(this.combineFrames.bind(this));

  // Upload video to S3 or move to local storage
  q.defer(transports.uploadVideo, this.videoPath, "video/" + this.id + ".mp4");

  // Delete working directory
  q.defer(rimraf, this.dir);

  // Final callback, results in a URL where the finished video is accessible
  q.await(function(err){

    if (!err) {
      self.set("url", transports.getURL(self.id));
    }

    return cb(err);

  });

  return this;

};

Audiogram.prototype.set = function(field, value) {
  logger.debug(field + "=" + value);
  transports.setField(this.id, field, value);
  return this;
};

// Convenience method for .set("status")
Audiogram.prototype.status = function(value) {
  return this.set("status", value);
};

module.exports = Audiogram;
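// Example usage (an illustrative sketch, not part of this module): a worker
// consuming a render job could construct an Audiogram from the stored job
// settings and call render(). The field values below are assumptions for the
// sake of the example; they mirror the settings read directly in this file
// (id, framesPerSecond, samplesPerFrame, maxDuration, start, end), and the
// helpers such as initialize-canvas.js read additional settings not shown here.
//
//   var Audiogram = require("./index.js");
//
//   var audiogram = new Audiogram({
//     id: "1234-abcd",
//     framesPerSecond: 20,
//     samplesPerFrame: 128,
//     maxDuration: 300,
//     start: 0,
//     end: 30
//   });
//
//   audiogram.render(function(err){
//     if (err) {
//       console.error(err);
//     }
//   });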