 

Import SQL dump within Node environment

I'd like an npm script to create/configure/etc. and finally import a SQL dump. The entire creation, configuration, etc. is working; however, I cannot get the import to work. The data is never inserted. Here's what I have (never mind the nested callbacks, as they'll be turned into promises):

connection.query(`DROP DATABASE IF EXISTS ${config.database};`, err => {
  connection.query(`CREATE DATABASE IF NOT EXISTS ${config.database};`, err => {
    connection.query('use DATABASENAME', err => {
      const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
      connection.query(`SOURCE ${sqlDumpPath}`, err => {
        connection.end(err => resolve());
      });
    })
  });
});

I also tried the following with Sequelize (ORM):

return new Promise(resolve => {
  const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
  fs.readFile('./sql/dump.sql', 'utf-8', (err, data) => {
    sequelize
      .query(data)
      .then(resolve)
      .catch(console.error);
  });
}); 
asked Apr 03 '16 by Detuned



2 Answers

Here's how I set up my initial Sequelize import using the migrations framework. There is plenty going on here, but in short I:

  1. find the latest SQL dump file in the migrations folder
  2. read the file using fs
  3. split the text into queries
  4. check if it's a valid query and, if so, apply some cleaning that my data required (see related post)
  5. push the queries onto an array - I start by making sure that the database is clean by calling this.down first
  6. run everything as a promise (as suggested here) using mapSeries (not map)

Using sequelize-cli, you can create a migration from your shell by running:

sequelize migration:create

This automatically creates the file where you enter the code below. To execute the migration, simply run:

sequelize db:migrate

"use strict";
const promise = require("bluebird");
const fs = require("fs");
const path = require("path");
const assert = require("assert");
const db = require("../api/models"); // To be able to run raw queries
const debug = require("debug")("my_new_api");

// I needed this in order to get some encoding issues straight
const Aring = new RegExp(String.fromCharCode(65533) +
  "\\" + String.fromCharCode(46) + "{1,3}", "g");
const Auml = new RegExp(String.fromCharCode(65533) +
  String.fromCharCode(44) + "{1,3}", "g");
const Ouml = new RegExp(String.fromCharCode(65533) +
  String.fromCharCode(45) + "{1,3}", "g");

module.exports = {
  up: function (queryInterface, Sequelize) {
    // The following section allows me to have multiple sql-files and only use the last dump
    var last_sql;
    for (let fn of fs.readdirSync(__dirname)){
      if (fn.match(/\.sql$/)){
        fn = path.join(__dirname, fn);
        var stats = fs.statSync(fn);
        if (typeof last_sql === "undefined" ||
            last_sql.stats.mtime < stats.mtime){
          last_sql = {
            filename: fn,
            stats: stats
          };
        }
      }
    }
    assert(typeof last_sql !== "undefined", "Could not find any valid sql files in " + __dirname);

    // Split file into queries
    var queries = fs.readFileSync(last_sql.filename).toString().split(/;\n/);

    var actions = [{
      query: "Running the down section",
      exec: this.down
    }]; // Clean database by calling the down first

    for (let i in queries){
      // Skip empty queries and the character set information in the 40101 section
      //   as this would most likely require a multi-query set-up
      if (queries[i].trim().length == 0 ||
          queries[i].match(new RegExp("/\\*!40101 .+ \\*/"))){
        continue;
      }

      // The manual fixing of encoding
      let clean_query = queries[i]
        .replace(Aring, "Å")
        .replace(Ouml, "Ö")
        .replace(Auml, "Ä");

      actions.push({
        query: clean_query.substring(0, 200), // We save a short section of the query only for debugging purposes
        exec: () => db.sequelize.query(clean_query)
      });
    }

    // The Series is important as the order isn't retained with just map
    return promise.mapSeries(actions, function(item) {
      debug(item.query);

      return item.exec();
    });
  },

  down: function (queryInterface, Sequelize) {
    var tables_2_drop = [
      "items",
      "users",
      "usertypes"
    ];
    var actions = [];
    for (let tbl of tables_2_drop){
      actions.push({
        exec: () => db.sequelize.query("DROP TABLE IF EXISTS `" + tbl + "`")
      });
    }

    return promise.map(actions, function(item) {
      return item.exec();
    }, { concurrency: 1 });
  }
};
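
To make step 6 concrete, here is a small self-contained sketch (not part of the answer above; the statements and delays are made up) of why bluebird's mapSeries is used: it waits for each item to finish before starting the next, whereas plain map starts all mappers up front, so a statement that depends on an earlier one could run too soon.

const Promise = require("bluebird");

// Pretend these are SQL statements where execution order matters.
const actions = [
  { query: "CREATE TABLE t (id INT)", delay: 30 },
  { query: "INSERT INTO t VALUES (1)", delay: 10 }
];

// Stand-in for db.sequelize.query(): resolves after a short delay.
const run = item =>
  Promise.delay(item.delay).then(() => console.log("done:", item.query));

// mapSeries runs one item at a time, in array order: CREATE logs before INSERT.
Promise.mapSeries(actions, run)
  // map starts every item immediately: the faster INSERT would log first here.
  .then(() => Promise.map(actions, run));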
answered Oct 28 '22 by Max Gordon


Based loosely on Max Gordon's answer, here's my code to run a MySQL dump file from Node.js/Sequelize:

"use strict";

const fs = require("fs");
const path = require("path");

/**
 * Start off with a MySQL Dump file, import that, and then migrate to the latest version.
 *
 * @param dbName {string} the name of the database
 * @param mysqlDumpFile {string} The full path to the file to import as a starting point
 */
module.exports.migrateFromFile = function(dbName, mysqlDumpFile) {
  let sequelize = createSequelize(dbName); // createSequelize is the author's own helper (not shown), presumably returning a configured Sequelize instance for dbName
  console.log("Importing from " + mysqlDumpFile + "...");
  let queries = fs.readFileSync(mysqlDumpFile, {encoding: "UTF-8"}).split(";\n");

  console.log("Importing dump file...");

  // Setup the DB to import data in bulk.
  let promise = sequelize.query("set FOREIGN_KEY_CHECKS=0").then(() => {
    return sequelize.query("set UNIQUE_CHECKS=0");
  }).then(() => {
    return sequelize.query("set SQL_MODE='NO_AUTO_VALUE_ON_ZERO'");
  }).then(() => {
    return sequelize.query("set SQL_NOTES=0");
  });

  console.time("Importing mysql dump");
  for (let query of queries) {
    query = query.trim();
    if (query.length !== 0 && !query.match(/\/\*/)) {
      promise = promise.then(() => {
        console.log("Executing: " + query.substring(0, 100));
        return sequelize.query(query, {raw: true});
      })
    }
  }

  return promise.then(() => {
    console.timeEnd("Importing mysql dump");

    console.log("Migrating the rest of the way...");
    console.time("Migrating after importing mysql dump");
    return exports.migrateUp(dbName); // Run the rest of your migrations
  }).then(() => {
    console.timeEnd("Migrating after importing mysql dump");
  });

};
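
Note that createSequelize and migrateUp above are the answer author's own helpers and aren't shown. Assuming migrateFromFile is exported from a local module (the require path below is made up for illustration), calling it might look like this:

const path = require("path");
const { migrateFromFile } = require("./migrate"); // hypothetical path to the module above

// Import the dump into "my_database", then run the remaining migrations.
migrateFromFile("my_database", path.join(__dirname, "sql-dump/sql-dump.sql"))
  .then(() => console.log("Database imported and migrated"))
  .catch(err => {
    console.error("Import failed:", err);
    process.exit(1);
  });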
answered Oct 28 '22 by Ryan Shillington