Commit 6e886e10 authored by Clownce Deng's avatar Clownce Deng

+) 添加脚本;

parent 2679146b
/*
 * @File: batchExecutor.js
 * @Description: Batch-launches the PostgreSQL data-insert test scripts as detached processes
 * @Author: clownce.deng
 * @Date: 2020-05-13 14:48:00
 */
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var ioprocess = require('topsin.ioprocess');
var config = require("./config");
// Entry point: parse CLI arguments and spawn one detached worker process per
// requested workcenter, each running the selected insert script.
try {
    var argv = process.argv;
    // Script selection: 0 = batchInsert script, 1 = COPY script
    var type = _.toNumber(argv[1]);
    // Number of worker processes to spawn
    var process_count = _.toNumber(argv[2]);
    // Number of data rows each worker should insert
    var data_row_count = _.toNumber(argv[3]);
    // NOTE(review): argv[1..3] assumes the TopJS runtime exposes user args
    // starting at index 1 — confirm against TopJS `process` docs.
    if (_.isNaN(type) || _.isNaN(process_count) || _.isNaN(data_row_count)
            || process_count === 0 || data_row_count === 0) {
        // Throw a real Error (not a bare string) so the catch handler gets a
        // proper Error object.
        throw new Error("invalid parameter.");
    }
    var script_name = "";
    if (type === 0) {
        script_name = "./pgBatchInsert.js";
    } else if (type === 1) {
        script_name = "./pgBatchCopy.js";
    } else {
        throw new Error("invalid parameter: " + type);
    }
    // Launch each worker detached so the inserts run concurrently; the loop
    // index doubles as the workcenter id handed to the worker script.
    for (var count = 1; count <= process_count; count++) {
        var ret = ioprocess.startDetached(config.topjs_path, [script_name, count, data_row_count]);
        console.info("Process " + count + " result: " + ret);
    }
} catch (e) {
    console.error(e);
}
\ No newline at end of file
/*
 * @File: config.js
 * @Description: Shared configuration for the batch-insert test scripts
 * @Author: clownce.deng
 * @Date: 2020-05-15 21:15:00
 */
// Shared settings consumed by batchExecutor.js, getTotalTime.js,
// pgBatchCopy.js and pgBatchInsert.js.
module.exports = {
// Database connection settings (passed to topsin.database addConnection).
// NOTE(review): credentials are hard-coded; acceptable for a local benchmark
// only — do not reuse this pattern in production code.
database_conf: {
database_type: "pg",
database_host: "127.0.0.1:5432",
database_name: "TOPMES6_TEST_V6",
database_user: "toplinker",
database_pwd: "TopLinker0510",
auto_close_timeout: -1
},
// Absolute path to the TopJS interpreter executable used to spawn workers
topjs_path: "D:/Programs/TopJS/3.3.1/bin/topjs3.exe",
// Working directory: generated CSV files and per-process timing results live here
work_path: "F:/workspace/pg_batch_insert"
};
\ No newline at end of file
/*
 * @File: getTotalTime.js
 * @Description: Aggregates the per-process execution times into a single summary file
 * @Author: clownce.deng
 * @Date: 2020-05-15 10:49:00
 */
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var ioprocess = require('topsin.ioprocess');
var config = require("./config");
// Entry point: read the timing file each insert worker wrote
// (<work_path>/process_<n>.txt) and concatenate them into total_time_cost.txt.
try {
    var argv = process.argv;
    // Number of worker processes whose result files should be collected
    var process_count = _.toNumber(argv[1]);
    if (_.isNaN(process_count) || process_count === 0) {
        // Real Error instead of a bare string throw.
        throw new Error("invalid parameter.");
    }
    // Collect lines and join once instead of repeated string concatenation.
    var lines = [];
    for (var count = 1; count <= process_count; count++) {
        var file_path = config.work_path + "/process_" + count + ".txt";
        // NOTE(review): this is the TopJS synchronous fs API (returns content
        // directly), unlike Node's callback-based fs.readFile — confirm.
        var content = fs.readFile(file_path);
        console.info(content);
        lines.push(content);
    }
    // Trailing "\n" matches the original output format.
    fs.writeFile(config.work_path + "/total_time_cost.txt", lines.join("\n") + "\n");
} catch (e) {
    console.error(e);
}
\ No newline at end of file
/*
 * @File: pgBatchCopy.js
 * @Description: PostgreSQL bulk-insert test using the COPY command
 * @Author: clownce.deng
 * @Date: 2020-05-14 08:57:00
 */
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var config = require("./config");
// Entry point: generate a CSV file of synthetic rows for one workcenter, then
// bulk-load it into oee_machine_log_v2 via PostgreSQL COPY, timing the load.
try {
    var argv = process.argv;
    // Workcenter ID (also names the generated CSV and the timing result file)
    var wid = argv[1];
    // Number of data rows to insert
    var data_row_count = _.toNumber(argv[2]);
    if (_.isNaN(data_row_count) || data_row_count === 0) {
        // Real Error instead of a bare string throw.
        throw new Error("invalid parameter: " + argv[2]);
    }
    // Initialize the database connection
    var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
    DB.addConnection(config.database_conf, LOCAL_DB_CONN);
    // Verify the connection before doing any work
    var isConnected = DB.query(LOCAL_DB_CONN, function (q) {
        return q.isConnected();
    });
    if (!isConnected) {
        throw new Error("Connect to local database failed.");
    }
    console.info("Connect to database successful.");
    // Generate the local CSV file that the server-side COPY will read
    randomBuildLogFile(wid, data_row_count);
    console.info("start to batchInsert data...");
    var query = DB.query(LOCAL_DB_CONN);
    var beforeTime = moment();
    query.begin();
    // COPY resolves the file path on the server; \COPY would resolve it on the client.
    var sql = "COPY oee_machine_log_v2 (workcenter_id,log_time,log_type,lot_no,partnumber,subpart,lot_serial,station,state,"
        + "programe_name,daq_time,analysis_flag,log_data) FROM '{0}/data{1}.csv' delimiter ',' csv header";
    sql = _.format(sql, config.work_path, wid);
    // Exporting from a partitioned table requires wrapping a SELECT, e.g.:
    // COPY ( select <columns> from oee_machine_log_v2 ) TO '<path>' CSV HEADER
    query.execSql(sql);
    if (query.lastError().isValid()) {
        // Roll back the transaction before surfacing the database error.
        query.rollback();
        throw new Error("batchInsert data failed. " + query.lastError().text());
    }
    query.commit();
    var afterTime = moment();
    console.info("batchInsert data success.");
    var duration = moment.duration(afterTime.diff(beforeTime));
    console.info("elapsed time(seconds): " + duration.as("seconds"));
    // Persist elapsed seconds so getTotalTime.js can aggregate all workers
    fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
} catch (e) {
    console.error(e);
}
/**
 * Build a CSV file of `row_count` synthetic machine-log rows for one
 * workcenter and write it to <work_path>/data<workcenter_id>.csv.
 * The first line is a CSV header matching the COPY column list; every data
 * row carries fixed test values except the generation-time timestamps.
 * @param {string|number} workcenter_id - id stamped on every row and used in the file name
 * @param {number} row_count - number of data rows to generate
 */
function randomBuildLogFile(workcenter_id, row_count) {
    var header = ["workcenter_id", "log_time", "log_type", "lot_no",
        "partnumber", "subpart", "lot_serial", "station", "state",
        "programe_name", "daq_time", "analysis_flag", "log_data"];
    // Collect complete lines and join once at the end: the original
    // `content += ...` in the loop is accidentally quadratic for large
    // row counts.
    var lines = [_.join(header, ",")];
    for (var index = 0; index < row_count; index++) {
        var rowData = [];
        rowData.push(workcenter_id);
        rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
        rowData.push("info");
        rowData.push("1234567890");
        rowData.push("ABCDEFGH");
        rowData.push("test_part");
        rowData.push("12345");
        rowData.push("test_station");
        rowData.push("test_state");
        rowData.push("test_program");
        rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
        rowData.push("t");
        rowData.push(randomBuildMapData());
        lines.push(_.join(rowData, ","));
    }
    // Trailing "\n" keeps the file byte-identical to the original format
    // (newline after the last row).
    fs.writeFile(config.work_path + "/data" + workcenter_id + ".csv", lines.join("\n") + "\n");
}
// Build a JSON-like map of 100 fixed test key/value pairs serialized as one
// CSV field: the whole value is wrapped in quotes and each inner quote is
// doubled ("") per CSV quoting rules, so COPY ... csv parses it as a single
// column value.
function randomBuildMapData() {
    var pairs = [];
    for (var i = 1; i <= 100; i++) {
        pairs.push("\"\"test_key_" + i + "\"\"" + ": " + "\"\"test_value_" + i + "\"\"");
    }
    return "\"{" + pairs.join(",") + "}\"";
}
\ No newline at end of file
/*
 * @File: pgBatchInsert.js
 * @Description: PostgreSQL bulk-insert test using the batchInsert API
 * @Author: clownce.deng
 * @Date: 2020-05-13 14:40:00
 */
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var config = require("./config");
// Entry point: generate `data_row_count` synthetic rows in memory and insert
// them into oee_machine_log_v2 with one transactional batchInsert, timing it.
try {
    var argv = process.argv;
    // Workcenter ID (stamped on every row; also names the timing result file)
    var wid = argv[1];
    // Number of data rows to insert
    var data_row_count = _.toNumber(argv[2]);
    if (_.isNaN(data_row_count) || data_row_count === 0) {
        // Real Error instead of a bare string throw.
        throw new Error("invalid parameter: " + argv[2]);
    }
    // Initialize the database connection
    var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
    DB.addConnection(config.database_conf, LOCAL_DB_CONN);
    // Verify the connection before doing any work
    var isConnected = DB.query(LOCAL_DB_CONN, function (q) {
        return q.isConnected();
    });
    if (!isConnected) {
        throw new Error("Connect to local database failed.");
    }
    console.info("Connect to database successful.");
    // Generate the random row data in memory
    console.info("start to build random data...");
    var logDataList = randomBuildLogList(wid, data_row_count);
    // Batch-insert everything inside a single transaction
    console.info("start to batchInsert data...");
    var query = DB.query(LOCAL_DB_CONN);
    var beforeTime = moment();
    query.begin();
    query.batchInsert("oee_machine_log_v2", _.keys(logDataList[0]), logDataList);
    if (query.lastError().isValid()) {
        // Roll back the transaction before surfacing the database error.
        query.rollback();
        throw new Error("batchInsert data failed. " + query.lastError().text());
    }
    query.commit();
    var afterTime = moment();
    console.info("batchInsert data success.");
    var duration = moment.duration(afterTime.diff(beforeTime));
    console.info("elapsed time(seconds): " + duration.as("seconds"));
    // Persist elapsed seconds so getTotalTime.js can aggregate all workers
    fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
} catch (e) {
    console.error(e);
}
/**
 * Generate `row_count` synthetic machine-log records for one workcenter.
 * Every record carries the same fixed test values except the two timestamp
 * fields, which are taken at generation time.
 * @param {string|number} workcenter_id - id stamped on every record
 * @param {number} row_count - number of records to generate
 * @returns {Array<Object>} list of row objects keyed by column name
 */
function randomBuildLogList(workcenter_id, row_count) {
    var rows = [];
    for (var i = 0; i < row_count; i++) {
        rows.push({
            workcenter_id: workcenter_id,
            log_time: moment().format("YYYY-MM-DD HH:mm:ss"),
            log_type: "info",
            lot_no: "1234567890",
            partnumber: "ABCDEFGH",
            subpart: "test_part",
            lot_serial: 12345,
            station: "test_station",
            state: "test_state",
            programe_name: "test_program",
            daq_time: moment().format("YYYY-MM-DD HH:mm:ss"),
            analysis_flag: true,
            log_data: randomBuildMapData()
        });
    }
    return rows;
}
// Build a fixed dictionary of 100 test pairs ("test_key_i" -> "test_value_i")
// used as the log_data column value for every generated row.
function randomBuildMapData() {
    var data = {};
    var i = 1;
    while (i <= 100) {
        data["test_key_" + i] = "test_value_" + i;
        i += 1;
    }
    return data;
}
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment