Commit 57840ece authored by Clownce Deng's avatar Clownce Deng

*) 代码优化;

parent d0b6f4f8
......@@ -18,23 +18,15 @@ var config = require("./config");
try {
var argv = process.argv;
// 脚本选择 0:batchInsert脚本,1:COPY脚本
var type = _.toNumber(argv[1]);
// 进程数量
var process_count = _.toNumber(argv[2]);
var process_count = _.toNumber(argv[1]);
// 插入数据行数
var data_row_count = _.toNumber(argv[3]);
if (_.isNaN(type) || _.isNaN(process_count) || _.isNaN(data_row_count) || process_count == 0 || data_row_count == 0) {
var data_row_count = _.toNumber(argv[2]);
if ( _.isNaN(process_count) || _.isNaN(data_row_count) || process_count == 0 || data_row_count == 0) {
throw "invalid parameter.";
}
var script_name = "";
if (type == 0) {
script_name = "./pgBatchInsert.js";
} else if (type == 1) {
script_name = "./pgBatchCopy.js";
} else {
throw "invalid parameter: " + type;
}
var script_name = "./pgBatchInsert.js";
for (var count = 1; count <= process_count; count++) {
var ret = ioprocess.startDetached(config.topjs_path, [script_name, count, data_row_count]);
console.info("Process " + count + " result: " + ret);
......
......@@ -9,7 +9,7 @@ module.exports = {
// 数据库配置
database_conf: {
database_type: "pg",
database_host: "192.168.2.103:5432",
database_host: "127.0.0.1:5432",
database_name: "TOPMES6_TEST_V6",
database_user: "toplinker",
database_pwd: "TopLinker0510",
......
/*
* @File: pgBatchCopy.js
* @Description: pgsql通过COPY命令批量插入数据测试
* @Author: clownce.deng
* @Date: 2020-05-14 08:57:00
*/
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var config = require("./config");
try {
    var argv = process.argv;
    // Workcenter ID (names the CSV file and is stamped into every row)
    var wid = argv[1];
    // Number of data rows to insert
    var data_row_count = _.toNumber(argv[2]);
    if (_.isNaN(data_row_count) || data_row_count == 0) {
        throw "invalid parameter: " + argv[2];
    }
    // Initialize the database connection
    var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
    DB.addConnection(config.database_conf, LOCAL_DB_CONN);
    // Verify the database connection before doing any work
    var isConnected = DB.query(LOCAL_DB_CONN, function (q) {
        return q.isConnected();
    });
    if (!isConnected) {
        throw "Connect to local database failed.";
    }
    console.info("Connect to database successful.");
    // Generate the local CSV file that COPY will load
    randomBuildLogFile(wid, data_row_count);
    console.info("start to batchInsert data...");
    var query = DB.query(LOCAL_DB_CONN);
    var beforeTime = moment();
    query.begin();
    // COPY reads the file on the server side; \COPY reads it on the client side
    // (note: \COPY only runs in the SQL shell — it is COPY FROM STDIN in disguise)
    var sql = "COPY oee_machine_log_v2 (workcenter_id,log_time,log_type,lot_no,partnumber,subpart,lot_serial,station,state,"
        + "programe_name,daq_time,analysis_flag,log_data) FROM '{0}/data{1}.csv' delimiter ',' csv header";
    sql = _.format(sql, config.work_path, wid);
    // Exporting to a file: a partitioned table cannot be COPY'd out directly — use a SELECT instead
    // var sql = "COPY ( select workcenter_id,log_time,log_time2,log_type,lot_no,partnumber,subpart,lot_serial,station,state,programe_name,"
    // + "daq_time,analysis_flag,log_data from oee_machine_log_v2) TO 'F:/workspace/pg_batch_insert/data1.csv' CSV HEADER";
    query.execSql(sql);
    if (query.lastError().isValid()) {
        query.rollback();
        throw "batchInsert data failed. " + query.lastError().text();
    }
    query.commit();
    var afterTime = moment();
    console.info("batchInsert data success.");
    var duration = moment.duration(afterTime.diff(beforeTime));
    console.info("elapsed time(seconds): " + duration.as("seconds"));
    // Record the per-process elapsed time so the launcher can aggregate results
    fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
} catch (e) {
    console.error(e);
}
/**
 * Build a CSV file of random machine-log rows for the COPY test.
 * Writes "<work_path>/data<workcenter_id>.csv" containing a header row
 * followed by `row_count` fixed-shape test data rows.
 * @param workcenter_id  Workcenter ID stamped into every row (also names the file).
 * @param row_count      Number of data rows to generate.
 */
function randomBuildLogFile(workcenter_id, row_count) {
    // Column header — order must match the column list in the COPY statement.
    var header = [
        "workcenter_id", "log_time", "log_type", "lot_no", "partnumber",
        "subpart", "lot_serial", "station", "state", "programe_name",
        "daq_time", "analysis_flag", "log_data"
    ];
    var content = header.join(",") + "\n";
    for (var index = 0; index < row_count; index++) {
        // Use a distinct name per row: the original re-declared the header array
        // with `var`, silently reusing the same hoisted variable.
        var row = [];
        row.push(workcenter_id);
        row.push(moment().format("YYYY-MM-DD HH:mm:ss"));
        row.push("info");
        row.push("1234567890");
        row.push("ABCDEFGH");
        row.push("test_part");
        row.push("12345");
        row.push("test_station");
        row.push("test_state");
        row.push("test_program");
        row.push(moment().format("YYYY-MM-DD HH:mm:ss"));
        row.push("t"); // PostgreSQL boolean literal for `true`
        row.push(randomBuildMapData());
        content += row.join(",") + "\n";
    }
    fs.writeFile(config.work_path + "/data" + workcenter_id + ".csv", content);
}
/**
 * Build the `log_data` CSV field: a JSON-style map rendered with CSV-escaped
 * (doubled) quotes, e.g. `"{""k"": ""v""}"`.
 *
 * NOTE(review): the original loop bound was 0, so this always produced an
 * empty map `"{}"`. That may have been a deliberate way to disable the
 * payload for this test run, so 0 stays the default; pass `keyCount` to
 * generate test entries instead.
 *
 * @param keyCount  Optional number of test_key_N/test_value_N entries (default 0).
 * @returns CSV-quoted JSON object string.
 */
function randomBuildMapData(keyCount) {
    var total = typeof keyCount === "number" && !isNaN(keyCount) ? keyCount : 0;
    var retList = [];
    for (var count = 1; count <= total; count++) {
        // Doubled quotes ("") are the CSV escape for a literal quote character.
        retList.push('""test_key_' + count + '"": ""test_value_' + count + '""');
    }
    return '"{' + retList.join(",") + '}"';
}
\ No newline at end of file
/*
* @File: pgBatchInsert.js
* @Description: pgsql批量插入数据测试
* @File: pgBatchCopy.js
* @Description: pgsql通过COPY命令批量插入数据测试
* @Author: clownce.deng
* @Date: 2020-05-13 14:40:00
* @Date: 2020-05-14 08:57:00
*/
var _ = require("lodash");
......@@ -16,76 +16,103 @@ var process = require('process');
var config = require("./config");
try {
var argv = process.argv;
// 工作中心ID
var wid = argv[1];
// 插入数据行数
var data_row_count = _.toNumber(argv[2]);
var argv = process.argv;
// 工作中心ID
var wid = argv[1];
// 插入数据行数
var data_row_count = _.toNumber(argv[2]);
if (_.isNaN(data_row_count) || data_row_count == 0) {
throw "invalid parameter: " + argv[2];
}
// 初始化数据库连接
var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
DB.addConnection(config.database_conf, LOCAL_DB_CONN);
// 测试数据库连接
var isConnected = DB.query(LOCAL_DB_CONN, function (q) {
return q.isConnected();
});
if (!isConnected) {
throw "Connect to local database failed.";
}
console.info("Connect to database sucessful.");
// 生成随机数据
console.info("start to build random data...");
var logDataList = randomBuildLogList(wid, data_row_count);
// 批量插入数据到数据库
console.info("start to batchInsert data...");
var query = DB.query(LOCAL_DB_CONN);
query.begin();
var beforeTime = moment();
query.batchInsert("oee_machine_log_v2", _.keys(logDataList[0]), logDataList);
if (query.lastError().isValid()) {
query.rollback();
throw "batchInsert data faild. " + query.lastError().text();
}
query.commit();
var afterTime = moment();
console.info("batchInsert data success.");
var duration = moment.duration(afterTime.diff(beforeTime));
console.info("elapsed time(seconds): " + duration.as("seconds"));
fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
// 初始化数据库连接
var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
DB.addConnection(config.database_conf, LOCAL_DB_CONN);
// 测试数据库连接
var isConnected = DB.query(LOCAL_DB_CONN, function (q) {
return q.isConnected();
});
if (!isConnected) {
throw "Connect to local database failed.";
}
console.info("Connect to database sucessful.");
// 生成本地文件
randomBuildLogFile(wid, data_row_count);
console.info("start to batchInsert data...");
var query = DB.query(LOCAL_DB_CONN);
var beforeTime = moment();
query.begin();
// COPY服务端寻找文件,\COPY客户端寻找文件(注意:\COPY只能在SQL Shell中执行,它是变相调用COPY FROM STDIN)
var sql = "COPY oee_machine_log_v2 (workcenter_id,log_time,log_type,lot_no,partnumber,subpart,lot_serial,station,state,"
+ "programe_name,daq_time,analysis_flag,log_data) FROM '{0}/data{1}.csv' delimiter ',' csv header";
sql = _.format(sql, config.work_path, wid);
// 导出数据到文件,带分表不能直接导出,需用select查询
// var sql = "COPY ( select workcenter_id,log_time,log_time2,log_type,lot_no,partnumber,subpart,lot_serial,station,state,programe_name,"
// + "daq_time,analysis_flag,log_data from oee_machine_log_v2) TO 'F:/workspace/pg_batch_insert/data1.csv' CSV HEADER";
query.execSql(sql);
if (query.lastError().isValid()) {
query.rollback();
throw "batchInsert data faild. " + query.lastError().text();
}
query.commit();
var afterTime = moment();
console.info("batchInsert data success.");
var duration = moment.duration(afterTime.diff(beforeTime));
console.info("elapsed time(seconds): " + duration.as("seconds"));
fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
} catch (e) {
console.error(e);
}
function randomBuildLogList(workcenter_id, row_count) {
var dataCount = row_count;
var retDataList = [];
for (var index = 0; index < dataCount; index++) {
var dataMap = {};
dataMap['workcenter_id'] = workcenter_id;
dataMap['log_time'] = moment().format("YYYY-MM-DD HH:mm:ss");
dataMap['log_type'] = "info";
dataMap['lot_no'] = "1234567890";
dataMap['partnumber'] = "ABCDEFGH";
dataMap['subpart'] = "test_part";
dataMap['lot_serial'] = 12345;
dataMap['station'] = "test_station";
dataMap['state'] = "test_state";
dataMap['programe_name'] = "test_program";
dataMap['daq_time'] = moment().format("YYYY-MM-DD HH:mm:ss");
dataMap['analysis_flag'] = true;
dataMap['log_data'] = randomBuildMapData();
retDataList.push(dataMap);
}
return retDataList;
console.error(e);
}
function randomBuildMapData() {
var retMap = {};
for (var count = 1; count <= 0; count++) {
retMap["test_key_" + count] = "test_value_" + count;
function randomBuildLogFile(workcenter_id, row_count) {
var dataCount = row_count;
var rowData = [];
rowData.push("workcenter_id");
rowData.push("log_time");
rowData.push("log_type");
rowData.push("lot_no");
rowData.push("partnumber");
rowData.push("subpart");
rowData.push("lot_serial");
rowData.push("station");
rowData.push("state");
rowData.push("programe_name");
rowData.push("daq_time");
rowData.push("analysis_flag");
rowData.push("log_data");
var content = _.join(rowData, ",");
content += "\n";
for (var index = 0; index < dataCount; index++) {
var rowData = [];
rowData.push(workcenter_id);
rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
rowData.push("info");
rowData.push("1234567890");
rowData.push("ABCDEFGH");
rowData.push("test_part");
rowData.push("12345");
rowData.push("test_station");
rowData.push("test_state");
rowData.push("test_program");
rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
rowData.push("t");
rowData.push(randomBuildMapData());
content += _.join(rowData, ",");
content += "\n";
}
fs.writeFile(config.work_path + "/data" + workcenter_id + ".csv", content);
}
return retMap;
}
\ No newline at end of file
// Build the `log_data` CSV field: 100 test key/value pairs rendered as a
// JSON-style map with CSV-escaped (doubled) quotes, e.g. `"{""k"": ""v"",...}"`.
function randomBuildMapData() {
    var pieces = [];
    for (var i = 1; i <= 100; i++) {
        // Doubled quotes ("") are the CSV escape for a literal quote character.
        pieces.push('""test_key_' + i + '"": ""test_value_' + i + '""');
    }
    return '"{' + pieces.join(",") + '}"';
}
\ No newline at end of file
/*
* @File: pgBatchInsert.js
* @Description: pgsql批量插入数据测试
* @Author: clownce.deng
* @Date: 2020-05-13 14:40:00
*/
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var config = require("./config");
try {
    var argv = process.argv;
    // Workcenter ID stamped into every generated record
    var wid = argv[1];
    // Number of data rows to insert
    var data_row_count = _.toNumber(argv[2]);
    if (_.isNaN(data_row_count) || data_row_count == 0) {
        throw "invalid parameter: " + argv[2];
    }
    // Initialize the database connection
    var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
    DB.addConnection(config.database_conf, LOCAL_DB_CONN);
    // Verify the database connection before doing any work
    var isConnected = DB.query(LOCAL_DB_CONN, function (q) {
        return q.isConnected();
    });
    if (!isConnected) {
        throw "Connect to local database failed.";
    }
    console.info("Connect to database successful.");
    // Generate the random records in memory
    console.info("start to build random data...");
    var logDataList = randomBuildLogList(wid, data_row_count);
    // Bulk-insert the records inside a single transaction
    console.info("start to batchInsert data...");
    var query = DB.query(LOCAL_DB_CONN);
    query.begin();
    var beforeTime = moment();
    query.batchInsert("oee_machine_log_v2", _.keys(logDataList[0]), logDataList);
    if (query.lastError().isValid()) {
        query.rollback();
        throw "batchInsert data failed. " + query.lastError().text();
    }
    query.commit();
    var afterTime = moment();
    console.info("batchInsert data success.");
    var duration = moment.duration(afterTime.diff(beforeTime));
    console.info("elapsed time(seconds): " + duration.as("seconds"));
    // Record the per-process elapsed time so the launcher can aggregate results
    fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
} catch (e) {
    console.error(e);
}
/**
 * Build `row_count` random machine-log records for batchInsert.
 * Every record carries the given workcenter_id plus fixed test values;
 * key order matches the oee_machine_log_v2 column list used by the insert.
 * @param workcenter_id  Workcenter ID stamped into every record.
 * @param row_count      Number of records to generate.
 * @returns Array of plain record objects.
 */
function randomBuildLogList(workcenter_id, row_count) {
    var records = [];
    for (var i = 0; i < row_count; i++) {
        records.push({
            workcenter_id: workcenter_id,
            log_time: moment().format("YYYY-MM-DD HH:mm:ss"),
            log_type: "info",
            lot_no: "1234567890",
            partnumber: "ABCDEFGH",
            subpart: "test_part",
            lot_serial: 12345,
            station: "test_station",
            state: "test_state",
            programe_name: "test_program",
            daq_time: moment().format("YYYY-MM-DD HH:mm:ss"),
            analysis_flag: true,
            log_data: randomBuildMapData()
        });
    }
    return records;
}
// Build the `log_data` payload: a plain object holding 10 fixed
// test_key_N -> test_value_N entries.
function randomBuildMapData() {
    var result = {};
    var n = 1;
    while (n <= 10) {
        result["test_key_" + n] = "test_value_" + n;
        n += 1;
    }
    return result;
}
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment