Note: the methods recorded in this post were gathered from other blogs, but I closed the browser after reading them and forgot to save the links to the original authors. My apologies.
1. Insert one million rows
1.1 Create a temporary table
CREATE TABLE t_sequence_num1 (
  sequenceNum number(8) not null
);
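The statement above creates an ordinary heap table used purely as a scratch row source. If an actual Oracle global temporary table is preferred, a variant like the sketch below could be used instead; the table name and the ON COMMIT PRESERVE ROWS clause are assumptions about the desired row lifetime, not part of the original setup.
-- sketch: global temporary table variant (name and ON COMMIT clause are assumptions)
CREATE GLOBAL TEMPORARY TABLE t_sequence_num_gtt (
  sequenceNum number(8) not null
) ON COMMIT PRESERVE ROWS;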
1.2 Populate the temporary table
begin
  -- generate 100,000 sequence numbers (0..99999) to drive the set-based insert in step 1.3
  delete from t_sequence_num1;
  for i in 0..99999 loop
    insert into t_sequence_num1(sequenceNum) values(i);
  end loop;
  commit;
end;
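As an alternative to the row-by-row loop, the same 100,000 sequence numbers can be generated with a single set-based statement using Oracle's CONNECT BY LEVEL trick; this is a sketch against the same table as above.
-- sketch: populate 100,000 sequence numbers (0..99999) in one statement
insert into t_sequence_num1(sequenceNum)
select level - 1 from dual connect by level <= 100000;
commit;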
1.3 Run the insert
begin
  -- 10 iterations x 100,000 rows from t_sequence_num1 = 1,000,000 rows in total
  for i in 1..10 loop
    insert /*+ append */ into sale_image
      (APPSHEETSERIALNO,
       SNO,
       CUSTNO,
       BUSINESSCODE,
       FORMTYPE,
       FILENAME,
       FILEPATH,
       SAVENAME,
       FILEEXTNAME,
       SECURITYLEVEL,
       FILESIZE,
       DOWNCOUNT,
       IP,
       FILESTATUS,
       UPLOADUSERID,
       UPLOADDATE,
       UPLOADTIME,
       MODIFYUSERID,
       MODIFYDATE,
       MODIFYTIME,
       ANYSHAREID,
       ORIGINALNAME)
    select
       '20190716000000001' || to_char(t_sequence_num1.sequencenum) || i,  -- APPSHEETSERIALNO
       1 + t_sequence_num1.sequencenum,                                   -- SNO
       1018000157 + t_sequence_num1.sequencenum,                          -- CUSTNO
       to_char(t_sequence_num1.sequencenum),                              -- BUSINESSCODE
       '02',                                                              -- FORMTYPE
       '2.pdf',                                                           -- FILENAME
       'image/ARCHIV_DATA/ChientFile/1003/100000000/10300377/20190711/',  -- FILEPATH
       null,                                                              -- SAVENAME
       'pdf',                                                             -- FILEEXTNAME
       '1',                                                               -- SECURITYLEVEL
       18.00,                                                             -- FILESIZE
       0,                                                                 -- DOWNCOUNT
       '10.4.145.53',                                                     -- IP
       '1',                                                               -- FILESTATUS
       0,                                                                 -- UPLOADUSERID
       '20190710',                                                        -- UPLOADDATE
       '123807',                                                          -- UPLOADTIME
       0,                                                                 -- MODIFYUSERID
       null,                                                              -- MODIFYDATE
       null,                                                              -- MODIFYTIME
       null,                                                              -- ANYSHAREID
       'null'                                                             -- ORIGINALNAME
    from t_sequence_num1;
    -- a commit is required after each direct-path (/*+ append */) insert
    -- before the table can be loaded again in the next iteration
    commit;
  end loop;
end;
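A quick sanity check after the load (a sketch; it simply counts the table, so any pre-existing rows are included in the total):
-- sketch: expect about 10 x 100,000 = 1,000,000 rows from this load
select count(*) from sale_image;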
2. Update one million rows
alter session set current_schema = <target schema>;
--be sure to back up the table data first
declare
  cursor cur_t is
    select t.rowid row_id
      from sale_image t
     where t.filepath like 'image%'
       and t.fileextname = 'pdf';
  v_counter number;
begin
  v_counter := 0;
  for row_t in cur_t loop
    update sale_image
       set filepath = '/home/noahfund/sale/KdSaleDoc/' || filepath
     where rowid = row_t.row_id;
    v_counter := v_counter + 1;
    -- commit in batches of 1,000 rows to keep the transaction small
    if v_counter >= 1000 then
      commit;
      v_counter := 0;
    end if;
  end loop;
  commit;
end;
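Committing inside an open cursor loop keeps each transaction small, but on very large tables it can raise ORA-01555 (snapshot too old). If undo space allows, the same change can also be done in a single set-based statement, which is usually faster than the row-by-row loop; a sketch with the same predicate:
-- sketch: single set-based update (needs enough undo for ~1,000,000 row changes)
update sale_image t
   set t.filepath = '/home/noahfund/sale/KdSaleDoc/' || t.filepath
 where t.filepath like 'image%'
   and t.fileextname = 'pdf';
commit;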
Summary: inserting 1,000,000 rows took roughly 20 seconds. Updating 1,000,000 rows with the LIKE predicate took roughly 2 minutes. LIKE is used here because it is the only condition that identifies the rows that need updating; I have not yet found a better approach.
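Since the pattern 'image%' has no leading wildcard, a plain B-tree index on filepath can in principle be used by the optimizer to locate the rows; whether it actually helps depends on how selective the predicate is relative to the whole table, and the index adds maintenance cost during the update. The index name below is made up for illustration.
-- sketch: index to support the filepath like 'image%' predicate (index name is hypothetical)
create index idx_sale_image_filepath on sale_image(filepath);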