A Node package used to extract a DOM element from a remote page or an HTML string, using CSS selectors. Based on jsdom for fetching and parsing, and on juice for inlining CSS.
npm install dom-extractor
var extractor = require('dom-extractor');
extractor.fetch("http://github.com/", "div.header", function(data){
//data contains the extracted HTML with css inlined, here the github header
});
var extractor = require('dom-extractor');
extractor.fetch("<div class='a'>Hello</div><div class='b'>World</div>!", ".a", function(data){
//data should contain the div with class a
});
When you use # in a selector, the browser does not send it to the server, since the # character marks the URL fragment used for in-page anchoring on the browser side. To use it anyway, write |sharp| in place of # in the selector.
You can pass an options object as the second parameter. The currently supported options are:
{
selector: String, //the selector used for extraction; default is "body"
innerText: Boolean, //extract text only, without HTML or CSS; default is false
inlineCss: Boolean //inline styles into the style attributes of the extracted DOM; default is true
}
Example, using the div.header selector and getting text only from the result:
var extractor = require('dom-extractor');
extractor.fetch("http://github.com/", { selector: "div.header", innerText: true }, function(data){
//data contains the extracted text only (innerText is true), here the text of the github header
});
app.use('/proxy', extractor.middleware());
npm install
npm test