Nightmare.js screenshot buffer length 0

I'm running a nightmare.js script where I'm trying to take a screenshot of multiple elements on a page.

The first element is captured just fine, but every other element that is below the fold comes back as a zero-length buffer. I am struggling to debug this issue. Any help would be greatly appreciated.

Basically this script walks through a page and selects all the elements that match a selector. Then, using async, it collects the responses and returns an array of screenshot buffers. The issue is that the elements below the fold are not captured (their buffer length ends up at zero). I tried to wait() and scroll to the element, but I have not had any success so far.

import * as Nightmare from 'nightmare'
import * as vo from 'vo'
import * as async from 'async'
import * as fs from 'fs'

const urls:String[] = [
  'https://yahoo.com/'
]


Nightmare.action('snap', function(selector:String, done:Function) {
  const self = this;

  this.evaluate_now(function (selector) {
    return Array.from(document.querySelectorAll(selector))
    .map((ele:Element) => {
      if (ele) {
        const rect = ele.getBoundingClientRect()
        const r:Function = Math.round
        return {
          x: r(rect.left),
          y: r(rect.top),
          width: r(rect.width),
          height: r(rect.height)
        }
      }
    })
  }, function(err, clips) {
    if (err) return done(err)
    if (!clips) return done(new Error(`Selector not found`))
    let snaps = []
    const snap = (clip, cb) => {
      self
        .scrollTo(clip.y - clip.height, clip.x)
        .screenshot(clip, cb)
        .run()
    }
    async.mapSeries(clips.reverse(), snap, (err, res) => {
      done(err, res)
    })
  }, selector)
})

const scrape = (url) => {
  const nightmare = Nightmare({
    show: true
  });
  nightmare
    .goto(url)
    .snap('.navbar')
    .end()
    .then((buffers:Buffer[]) => {
      buffers.forEach((data, index) => {
        fs.writeFileSync(`images/navbar-${index}.png`, data)
      })
    })
}

urls.forEach(scrape)
asked Jun 16 '17 by auser
1 Answer

Trying it with a different flow gave better results. The difference in approach: first scroll to the element, then take its bounds, and only then take the screenshot.

const Nightmare = require('nightmare');
const fs = require('fs');
const nightmare = Nightmare({
  show: true,
  openDevTools: false,
  gotoTimeout: 45000
});

nightmare.goto('https://www.google.co.in/?#safe=off&q=nightmare')
  .wait(1000)
  .evaluate(getElements, 'div.g')
  .then(() => {
    console.log("Calling screenshots: ");
    getAllScreenshots(0);
  })
  .catch(function(err) {
    console.log(err);
  });

function getAllScreenshots(index) {
  console.log("Called with index: ", index)
  nightmare.evaluate(function(index) {
      const r = Math.round;
      // The matching elements were stashed on window.__nightmare.output by
      // getElements below; stop once every one has been handled.
      if(index >= window.__nightmare.output.length) {
        return false;
      }
      var element = window.__nightmare.output[index];
      console.log(index, element.innerHTML);
      // Scroll first, then measure, so the bounds are relative to the
      // current viewport rather than the top of the page.
      element.scrollIntoView(false);
      var bound = element.getBoundingClientRect();
      return {
        x: r(bound.left)-10,
        y: r(bound.top)-10,
        width: r(element.clientWidth)+40,
        height: r(element.clientHeight)+10
      }
    }, index)
    .then(function(bound) {
      if(!bound) {
        return;
      }
      console.log("Taking screenshot: ", bound);
      // Give the scroll a moment to settle, then clip the screenshot to the
      // measured bounds and move on to the next element.
      nightmare.wait(500).screenshot(__dirname + '/images/navbar' + index + '.png', bound)
        .then(function() {
          console.log("Calling Next of: ", index);
          getAllScreenshots(index + 1);
        }).catch(function(err) {
          console.log(err);
        })
    })
    .catch(function(err) {
      console.log(err);
    });
}

function getElements(selector) {
  // Runs in the page context: keep a handle to the matched elements so the
  // per-index evaluate calls above can address them one at a time.
  var elements = document.querySelectorAll(selector);
  window.__nightmare.output = elements;
  console.log(elements.length);
}
answered Nov 14 '22 by devilpreet