Clownce Deng / pg-batch-insert-test / Commits

Commit 57840ece
authored May 20, 2020 by Clownce Deng
parent d0b6f4f8

*) Code optimization;

Showing 5 changed files with 192 additions and 200 deletions
src/batchExecutor.js     +5   -13
src/config.js            +1   -1
src/pgBatchCopy.js       +0   -118
src/pgBatchInsert.js     +95  -68
src/pgBatchInsertTmp.js  +91  -0
src/batchExecutor.js

@@ -18,23 +18,15 @@ var config = require("./config");
 try {
     var argv = process.argv;
-    // Script selection: 0 = the batchInsert script, 1 = the COPY script
-    var type = _.toNumber(argv[1]);
     // Number of worker processes
-    var process_count = _.toNumber(argv[2]);
+    var process_count = _.toNumber(argv[1]);
     // Number of data rows to insert
-    var data_row_count = _.toNumber(argv[3]);
-    if (_.isNaN(type) || _.isNaN(process_count) || _.isNaN(data_row_count) || process_count == 0 || data_row_count == 0) {
+    var data_row_count = _.toNumber(argv[2]);
+    if (_.isNaN(process_count) || _.isNaN(data_row_count) || process_count == 0 || data_row_count == 0) {
         throw "invalid parameter.";
     }
-    var script_name = "";
-    if (type == 0) {
-        script_name = "./pgBatchInsert.js";
-    } else if (type == 1) {
-        script_name = "./pgBatchCopy.js";
-    } else {
-        throw "invalid parameter: " + type;
-    }
+    var script_name = "./pgBatchInsert.js";
     for (var count = 1; count <= process_count; count++) {
         var ret = ioprocess.startDetached(config.topjs_path, [script_name, count, data_row_count]);
         console.info("Process " + count + " result: " + ret);
src/config.js

@@ -9,7 +9,7 @@ module.exports = {
     // Database configuration
     database_conf: {
         database_type: "pg",
-        database_host: "192.168.2.103:5432",
+        database_host: "127.0.0.1:5432",
         database_name: "TOPMES6_TEST_V6",
         database_user: "toplinker",
         database_pwd: "TopLinker0510",
src/pgBatchCopy.js
deleted (100644 → 0)

/*
 * @File: pgBatchCopy.js
 * @Description: Bulk data-insert test for pgsql via the COPY command
 * @Author: clownce.deng
 * @Date: 2020-05-14 08:57:00
 */
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var config = require("./config");

try {
    var argv = process.argv;
    // Workcenter ID
    var wid = argv[1];
    // Number of data rows to insert
    var data_row_count = _.toNumber(argv[2]);
    if (_.isNaN(data_row_count) || data_row_count == 0) {
        throw "invalid parameter: " + argv[2];
    }
    // Initialize the database connection
    var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
    DB.addConnection(config.database_conf, LOCAL_DB_CONN);
    // Test the database connection
    var isConnected = DB.query(LOCAL_DB_CONN, function(q) {
        return q.isConnected();
    });
    if (!isConnected) {
        throw "Connect to local database failed.";
    }
    console.info("Connect to database successful.");
    // Generate the local data file
    randomBuildLogFile(wid, data_row_count);
    console.info("start to batchInsert data...");
    var query = DB.query(LOCAL_DB_CONN);
    var beforeTime = moment();
    query.begin();
    // COPY resolves the file on the server; \COPY resolves it on the client
    // (note: \COPY can only be run in the SQL Shell; it effectively invokes COPY FROM STDIN)
    var sql = "COPY oee_machine_log_v2 (workcenter_id,log_time,log_type,lot_no,partnumber,subpart,lot_serial,station,state,"
        + "programe_name,daq_time,analysis_flag,log_data) FROM '{0}/data{1}.csv' delimiter ',' csv header";
    sql = _.format(sql, config.work_path, wid);
    // Exporting data to a file: a partitioned table cannot be exported directly, a select query is needed
    // var sql = "COPY ( select workcenter_id,log_time,log_time2,log_type,lot_no,partnumber,subpart,lot_serial,station,state,programe_name,"
    //     + "daq_time,analysis_flag,log_data from oee_machine_log_v2) TO 'F:/workspace/pg_batch_insert/data1.csv' CSV HEADER";
    query.execSql(sql);
    if (query.lastError().isValid()) {
        query.rollback();
        throw "batchInsert data failed. " + query.lastError().text();
    }
    query.commit();
    var afterTime = moment();
    console.info("batchInsert data success.");
    var duration = moment.duration(afterTime.diff(beforeTime));
    console.info("elapsed time(seconds): " + duration.as("seconds"));
    fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
} catch (e) {
    console.error(e);
}

function randomBuildLogFile(workcenter_id, row_count) {
    var dataCount = row_count;
    var rowData = [];
    rowData.push("workcenter_id");
    rowData.push("log_time");
    rowData.push("log_type");
    rowData.push("lot_no");
    rowData.push("partnumber");
    rowData.push("subpart");
    rowData.push("lot_serial");
    rowData.push("station");
    rowData.push("state");
    rowData.push("programe_name");
    rowData.push("daq_time");
    rowData.push("analysis_flag");
    rowData.push("log_data");
    var content = _.join(rowData, ",");
    content += "\n";
    for (var index = 0; index < dataCount; index++) {
        var rowData = [];
        rowData.push(workcenter_id);
        rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
        rowData.push("info");
        rowData.push("1234567890");
        rowData.push("ABCDEFGH");
        rowData.push("test_part");
        rowData.push("12345");
        rowData.push("test_station");
        rowData.push("test_state");
        rowData.push("test_program");
        rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
        rowData.push("t");
        rowData.push(randomBuildMapData());
        content += _.join(rowData, ",");
        content += "\n";
    }
    fs.writeFile(config.work_path + "/data" + workcenter_id + ".csv", content);
}

function randomBuildMapData() {
    var retMap = {};
    // note: the upper bound of 0 leaves the map empty in this revision
    for (var count = 1; count <= 0; count++) {
        retMap["test_key_" + count] = "test_value_" + count;
    }
    var retStr;
    var retList = [];
    _.forEach(retMap, function(v, k) {
        // doubled quotes are CSV escaping: after parsing, the field reads {"test_key_1": "test_value_1", ...}
        retList.push("\"\"" + k + "\"\"" + ": " + "\"\"" + v + "\"\"");
    });
    retStr = "\"{" + _.join(retList, ",") + "}\"";
    return retStr;
}
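For reference, the client-side variant mentioned in the \COPY note above reads the CSV on the machine running psql instead of on the database server. A minimal sketch of the equivalent psql meta-command, assuming the same table and CSV layout (this command is not part of the repository):

-- hypothetical client-side load; psql parses \copy itself and issues COPY ... FROM STDIN
\copy oee_machine_log_v2 (workcenter_id,log_time,log_type,lot_no,partnumber,subpart,lot_serial,station,state,programe_name,daq_time,analysis_flag,log_data) from 'data1.csv' delimiter ',' csv header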
src/pgBatchInsert.js

 /*
- * @File: pgBatchInsert.js
- * @Description: Bulk data-insert test for pgsql
+ * @File: pgBatchCopy.js
+ * @Description: Bulk data-insert test for pgsql via the COPY command
  * @Author: clownce.deng
- * @Date: 2020-05-13 14:40:00
+ * @Date: 2020-05-14 08:57:00
  */
 var _ = require("lodash");
@@ -16,76 +16,103 @@ var process = require('process');
 var config = require("./config");
 try {
     var argv = process.argv;
     // Workcenter ID
     var wid = argv[1];
     // Number of data rows to insert
     var data_row_count = _.toNumber(argv[2]);
     if (_.isNaN(data_row_count) || data_row_count == 0) {
         throw "invalid parameter: " + argv[2];
     }
     // Initialize the database connection
     var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
     DB.addConnection(config.database_conf, LOCAL_DB_CONN);
     // Test the database connection
     var isConnected = DB.query(LOCAL_DB_CONN, function(q) {
         return q.isConnected();
     });
     if (!isConnected) {
         throw "Connect to local database failed.";
     }
     console.info("Connect to database successful.");
-    // Generate random data
-    console.info("start to build random data...");
-    var logDataList = randomBuildLogList(wid, data_row_count);
-    // Batch-insert the data into the database
-    console.info("start to batchInsert data...");
-    var query = DB.query(LOCAL_DB_CONN);
-    query.begin();
-    var beforeTime = moment();
-    query.batchInsert("oee_machine_log_v2", _.keys(logDataList[0]), logDataList);
+    // Generate the local data file
+    randomBuildLogFile(wid, data_row_count);
+    console.info("start to batchInsert data...");
+    var query = DB.query(LOCAL_DB_CONN);
+    var beforeTime = moment();
+    query.begin();
+    // COPY resolves the file on the server; \COPY resolves it on the client
+    // (note: \COPY can only be run in the SQL Shell; it effectively invokes COPY FROM STDIN)
+    var sql = "COPY oee_machine_log_v2 (workcenter_id,log_time,log_type,lot_no,partnumber,subpart,lot_serial,station,state,"
+        + "programe_name,daq_time,analysis_flag,log_data) FROM '{0}/data{1}.csv' delimiter ',' csv header";
+    sql = _.format(sql, config.work_path, wid);
+    // Exporting data to a file: a partitioned table cannot be exported directly, a select query is needed
+    // var sql = "COPY ( select workcenter_id,log_time,log_time2,log_type,lot_no,partnumber,subpart,lot_serial,station,state,programe_name,"
+    //     + "daq_time,analysis_flag,log_data from oee_machine_log_v2) TO 'F:/workspace/pg_batch_insert/data1.csv' CSV HEADER";
+    query.execSql(sql);
     if (query.lastError().isValid()) {
         query.rollback();
         throw "batchInsert data failed. " + query.lastError().text();
     }
     query.commit();
     var afterTime = moment();
     console.info("batchInsert data success.");
     var duration = moment.duration(afterTime.diff(beforeTime));
     console.info("elapsed time(seconds): " + duration.as("seconds"));
     fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
 } catch (e) {
     console.error(e);
 }
-function randomBuildLogList(workcenter_id, row_count) {
-    var dataCount = row_count;
-    var retDataList = [];
-    for (var index = 0; index < dataCount; index++) {
-        var dataMap = {};
-        dataMap['workcenter_id'] = workcenter_id;
-        dataMap['log_time'] = moment().format("YYYY-MM-DD HH:mm:ss");
-        dataMap['log_type'] = "info";
-        dataMap['lot_no'] = "1234567890";
-        dataMap['partnumber'] = "ABCDEFGH";
-        dataMap['subpart'] = "test_part";
-        dataMap['lot_serial'] = 12345;
-        dataMap['station'] = "test_station";
-        dataMap['state'] = "test_state";
-        dataMap['programe_name'] = "test_program";
-        dataMap['daq_time'] = moment().format("YYYY-MM-DD HH:mm:ss");
-        dataMap['analysis_flag'] = true;
-        dataMap['log_data'] = randomBuildMapData();
-        retDataList.push(dataMap);
-    }
-    return retDataList;
-}
-function randomBuildMapData() {
-    var retMap = {};
-    for (var count = 1; count <= 0; count++) {
-        retMap["test_key_" + count] = "test_value_" + count;
-    }
-    return retMap;
-}
+function randomBuildLogFile(workcenter_id, row_count) {
+    var dataCount = row_count;
+    var rowData = [];
+    rowData.push("workcenter_id");
+    rowData.push("log_time");
+    rowData.push("log_type");
+    rowData.push("lot_no");
+    rowData.push("partnumber");
+    rowData.push("subpart");
+    rowData.push("lot_serial");
+    rowData.push("station");
+    rowData.push("state");
+    rowData.push("programe_name");
+    rowData.push("daq_time");
+    rowData.push("analysis_flag");
+    rowData.push("log_data");
+    var content = _.join(rowData, ",");
+    content += "\n";
+    for (var index = 0; index < dataCount; index++) {
+        var rowData = [];
+        rowData.push(workcenter_id);
+        rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
+        rowData.push("info");
+        rowData.push("1234567890");
+        rowData.push("ABCDEFGH");
+        rowData.push("test_part");
+        rowData.push("12345");
+        rowData.push("test_station");
+        rowData.push("test_state");
+        rowData.push("test_program");
+        rowData.push(moment().format("YYYY-MM-DD HH:mm:ss"));
+        rowData.push("t");
+        rowData.push(randomBuildMapData());
+        content += _.join(rowData, ",");
+        content += "\n";
+    }
+    fs.writeFile(config.work_path + "/data" + workcenter_id + ".csv", content);
+}
+function randomBuildMapData() {
+    var retMap = {};
+    for (var count = 1; count <= 100; count++) {
+        retMap["test_key_" + count] = "test_value_" + count;
+    }
+    var retStr;
+    var retList = [];
+    _.forEach(retMap, function(v, k) {
+        retList.push("\"\"" + k + "\"\"" + ": " + "\"\"" + v + "\"\"");
+    });
+    retStr = "\"{" + _.join(retList, ",") + "}\"";
+    return retStr;
+}
\ No newline at end of file
src/pgBatchInsertTmp.js
new file (0 → 100644)

/*
 * @File: pgBatchInsert.js
 * @Description: Bulk data-insert test for pgsql
 * @Author: clownce.deng
 * @Date: 2020-05-13 14:40:00
 */
var _ = require("lodash");
var fs = require("fs");
var moment = require("moment");
var logger = require("topsin.logger");
var DB = require("topsin.database");
var error = require("topsin.error");
var console = require("console");
var process = require('process');
var config = require("./config");

try {
    var argv = process.argv;
    // Workcenter ID
    var wid = argv[1];
    // Number of data rows to insert
    var data_row_count = _.toNumber(argv[2]);
    if (_.isNaN(data_row_count) || data_row_count == 0) {
        throw "invalid parameter: " + argv[2];
    }
    // Initialize the database connection
    var LOCAL_DB_CONN = 'LOCAL_DB_CONN';
    DB.addConnection(config.database_conf, LOCAL_DB_CONN);
    // Test the database connection
    var isConnected = DB.query(LOCAL_DB_CONN, function(q) {
        return q.isConnected();
    });
    if (!isConnected) {
        throw "Connect to local database failed.";
    }
    console.info("Connect to database successful.");
    // Generate random data
    console.info("start to build random data...");
    var logDataList = randomBuildLogList(wid, data_row_count);
    // Batch-insert the data into the database
    console.info("start to batchInsert data...");
    var query = DB.query(LOCAL_DB_CONN);
    query.begin();
    var beforeTime = moment();
    query.batchInsert("oee_machine_log_v2", _.keys(logDataList[0]), logDataList);
    if (query.lastError().isValid()) {
        query.rollback();
        throw "batchInsert data failed. " + query.lastError().text();
    }
    query.commit();
    var afterTime = moment();
    console.info("batchInsert data success.");
    var duration = moment.duration(afterTime.diff(beforeTime));
    console.info("elapsed time(seconds): " + duration.as("seconds"));
    fs.writeFile(config.work_path + "/process_" + wid + ".txt", duration.as("seconds"));
} catch (e) {
    console.error(e);
}

function randomBuildLogList(workcenter_id, row_count) {
    var dataCount = row_count;
    var retDataList = [];
    for (var index = 0; index < dataCount; index++) {
        var dataMap = {};
        dataMap['workcenter_id'] = workcenter_id;
        dataMap['log_time'] = moment().format("YYYY-MM-DD HH:mm:ss");
        dataMap['log_type'] = "info";
        dataMap['lot_no'] = "1234567890";
        dataMap['partnumber'] = "ABCDEFGH";
        dataMap['subpart'] = "test_part";
        dataMap['lot_serial'] = 12345;
        dataMap['station'] = "test_station";
        dataMap['state'] = "test_state";
        dataMap['programe_name'] = "test_program";
        dataMap['daq_time'] = moment().format("YYYY-MM-DD HH:mm:ss");
        dataMap['analysis_flag'] = true;
        dataMap['log_data'] = randomBuildMapData();
        retDataList.push(dataMap);
    }
    return retDataList;
}

function randomBuildMapData() {
    var retMap = {};
    for (var count = 1; count <= 10; count++) {
        retMap["test_key_" + count] = "test_value_" + count;
    }
    return retMap;
}
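Here query.batchInsert receives the table name, the column list taken from the first row map, and the full array of row maps. A multi-row INSERT of roughly the shape below is presumably what the topsin.database driver builds from that call (an assumption about its internals, abbreviated to three columns; the real statement would carry all thirteen):

-- hypothetical expansion of one batchInsert call (assumed driver behaviour, not from this repository)
INSERT INTO oee_machine_log_v2 (workcenter_id, log_type, lot_no)
VALUES ('1', 'info', '1234567890'),
       ('1', 'info', '1234567890');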