fs Readable stream skipping rows when importing to NoSQL using Mongoose schemas

I am attempting to import a CSV using my Mongoose model, and regardless of the file's size, only the first 2 rows and then every other row after that get imported.

const fs = require('mz/fs');
const mongoose = require('mongoose');
const { parse } = require('@fast-csv/parse');
const streamToIterator = require('stream-to-iterator');
const Product = require('./schemas/Product');

mongoose.Promise = global.Promise;
mongoose.set('debug', true);

const options = {
  useNewUrlParser: true,
  useUnifiedTopology: true,
};
const database = mongoose
  .connect(
    process.env.DATABASE_URL,
    options
  )
  .then((db) =>
    (async function () {
      console.log('Connected to database.');

      try {

        // Clear out any existing documents from all registered models
        await Promise.all(
          Object.entries(db.models).map(([k, m]) => m.deleteMany())
        );

        // Derive the CSV header names from the schema, minus _id and __v
        let headers = Object.keys(Product.schema.paths).filter(
          (k) => ['_id', '__v'].indexOf(k) === -1
        );

        if (await fs.exists('./database.csv')) {
          let stream = fs
            .createReadStream('./database.csv')
            .pipe(parse({ headers }));

          const iterator = await streamToIterator(stream).init();

          let buffer = [],
            counter = 0;

          for (let docPromise of iterator) {
            let doc = await docPromise;

            buffer.push(doc);
            counter++;

            // Flush to MongoDB in batches of 10,000 rows
            if (counter > 10000) {
              await Product.insertMany(buffer);
              buffer = [];
              counter = 0;
            }
          }

          if (counter > 0) {
            await Product.insertMany(buffer);

            buffer = [];
            counter = 0;
          }
        }
      } catch (e) {
        console.error(e);
      }
    })()
  )
  .catch((err) => console.error('Error connecting to database:', err));

module.exports = database;

When I look at my doc variable, the data is already malformed (every other row), and it is already malformed at the point where I read the stream, so I'm assuming the problem is occurring around there?
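For reference, Node.js readable streams have been async iterables since Node 10, so one way to narrow the problem down is to drop stream-to-iterator and iterate the fast-csv parse stream directly with for await...of. This is only a sketch, not a fix from the original question; it reuses the headers array and Product model from the code above:

// Sketch: iterate the parse stream directly with for await...of.
// Reuses the `headers` array and `Product` model defined above.
async function importCsvDirect(headers) {
  const stream = fs
    .createReadStream('./database.csv')
    .pipe(parse({ headers }));

  let buffer = [];

  for await (const row of stream) {
    buffer.push(row);

    // Flush in batches so the buffer stays bounded.
    if (buffer.length >= 10000) {
      await Product.insertMany(buffer);
      buffer = [];
    }
  }

  // Insert whatever is left after the stream ends.
  if (buffer.length > 0) {
    await Product.insertMany(buffer);
  }
}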



Solution 1:[1]

What I ended up doing to resolve this was to just turn the CSV into JSON and import it as normal. It's not ideal and doesn't really address the underlying issue, but my database has what it needs.
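The answer doesn't say how the conversion was done, but as an illustration (assuming the csvtojson package and the same Product model from the question), loading the whole file into an array of plain objects and passing it to insertMany looks roughly like this; it gives up streaming, so it only suits files that fit in memory:

// Hypothetical sketch of the CSV-to-JSON workaround using csvtojson.
const csvtojson = require('csvtojson');

async function importAsJson() {
  // Parse the entire CSV into an array of plain objects.
  const rows = await csvtojson().fromFile('./database.csv');

  // Bulk-insert the array with the Mongoose model.
  await Product.insertMany(rows);
}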

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source
Solution 1: Brandon Gorson