'What is causing Error TypeError: text is not iterable? - Web scraper Puppeteer NodeJs
I am learning nodejs/puppeteer and having issues getting Puppeteer to fill UPC numbers from a CSV file onto the search bar of a book website. I managed to get a the web scraper to scrape the website if I use a single UPC number.
But I am getting the below error when I used async function to CSV-Parse the UPC values from the CSV to the web scraper
(node:5876) UnhandledPromiseRejectionWarning: TypeError: text is not iterable
This is the sample CSV
DATE,QUANTITY,NAME,CODECONTENT,CODETYPE
2021-10-13 20:16:44 +1100,1,"Book 1","9781250035288",9
2021-10-13 20:16:40 +1100,1,"Book 2","9781847245601",9
2021-10-13 20:16:35 +1100,1,"Book 3","9780007149247",9
2021-10-13 20:16:30 +1100,1,"Book 4","9780749958084",9
2021-10-13 20:16:26 +1100,1,"Book 5","9781405920384",9
Is there something wrong with the way I am parsing the CSV?
function readCsvAsync(filename, delimiter=',', encoding='utf-8') {
return new Promise((resolve, reject) => {
const rows = [];
try {
fs.createReadStream(filename, {encoding: encoding})
.pipe(parse({delimiter: delimiter}))
.on('data', (row) => rows.push(+row.CODECONTENT))
.on('end', () => resolve(rows))
.on('error', reject);
} catch (err) {
reject(err);
}
});
}
async function upcData() {
try {
const rows = await readCsvAsync('Book_Bulk.csv', ':');
// console.log(csvData);
// call puppeteer or whatever
return rows;
} catch (err) {
console.log(err);
}
}
Full code below:
const puppeteer = require('puppeteer');
const parse = require('csv-parser');
const fs = require('fs');
async function getpageData(page,upc){
await page.goto('https://www.bookdepository.com/');
await page.type('#book-search-form > div.el-wrap.header-search-el-wrap > input.text-input',upc);
await page.click('#book-search-form > div.el-wrap.header-search-el-wrap > button');
//Title
await page.waitForSelector('.item-info h1');
const title = await page.$eval('.item-info h1', h1 => h1.textContent);
//Author
await page.waitForSelector('div.author-info.hidden-md > span > a > span');
const author = await page.$eval('div.author-info.hidden-md > span > a > span', span => span.innerText);
//Genre
await page.waitForSelector('.active a');
const genre = await page.$eval('.active a', a => a.innerText);
//Format
await page.waitForSelector('.item-info li');
const format = await page.$eval('.item-info li', li => li.innerText);
//Publisher
await page.waitForSelector('div.biblio-wrap > div > ul > li:nth-child(4) > span > a > span');
const publisher = await page.$eval('div.biblio-wrap > div > ul > li:nth-child(4) > span > a > span', span => span.innerText);
//Year
await page.waitForSelector('div.biblio-wrap > div > ul > li:nth-child(3) > span');
const year = await page.$eval('div.biblio-wrap > div > ul > li:nth-child(3) > span', span => span.innerText);
const newyear = year.slice(-4)
// Price
try {
await page.waitForSelector('div.price.item-price-wrap.hidden-xs.hidden-sm > span', { timeout: 1000 });
const price = await page.$eval('div.price.item-price-wrap.hidden-xs.hidden-sm > span', span => span.innerText);
var newprice = price.slice(-6);
} catch {
await page.waitForSelector('p.list-price');
const price = await page.$eval('p.list-price', p => p.innerText);
var newprice = price.slice(-6);
} finally {
await page.waitForSelector('div.price.item-price-wrap.hidden-xs.hidden-sm > span.sale-price');
const price = await page.$eval('div.price.item-price-wrap.hidden-xs.hidden-sm > span.sale-price', span => span.innerText);
var newprice = price.slice(-6);
}
// console.log(title);
// console.log(author);
// console.log(genre);
// console.log(format);
// console.log(publisher);
// console.log(newyear);
// console.log(newprice);
return {
title: title,
author: author,
genre: genre,
format: format,
publisher: publisher,
year: newyear,
price: newprice
}
};
function readCsvAsync(filename, delimiter=',', encoding='utf-8') {
return new Promise((resolve, reject) => {
const rows = [];
try {
fs.createReadStream(filename, {encoding: encoding})
.pipe(parse({delimiter: delimiter}))
.on('data', (row) => rows.push(+row.CODECONTENT))
.on('end', () => resolve(rows))
.on('error', reject);
} catch (err) {
reject(err);
}
});
}
async function upcData() {
try {
const rows = await readCsvAsync('Book_Bulk.csv', ':');
// console.log(csvData);
// call puppeteer or whatever
return rows;
} catch (err) {
console.log(err);
}
}
async function main(){
const allupcs = await upcData();
// console.log(allupcs);
const browser = await puppeteer.launch({ headless: false, defaultViewport: null, args: ['--start-maximized']});
const page = await browser.newPage();
const scrapedData = [];
for(let upc of allupcs){
const data = await getpageData(page,upc);
scrapedData.push(data);
}
console.log(scrapedData);
}
main();
Solution 1:[1]
To read your CSV file, I would recommend this:
fs.readFile(filename, 'utf8', (err, data) => {
if (err) throw err;
const AllContacts = data.split('\n')
AllContacts.forEach((el) => {
// And to access specific data
const date = el[0]
const quanity = el[1]
const name = el[2]
const codeContent = el[3]
const codeType = el[4]
})
})
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
Solution | Source |
---|---|
Solution 1 | Mikhail Zub |