Below are some examples:
-- Set up a test table by copying all rows from an existing table.
-- NOTE(review): the PL/SQL blocks below select OBJECT_NAME from this
-- table, but PER_ALL_PEOPLE_F (an Oracle HRMS table) is unlikely to
-- have that column -- confirm the intended source (ALL_OBJECTS?).
create table BULK_COLLECT_TEST as select * from PER_ALL_PEOPLE_F;
Table created.
-- Double the row count by inserting the table's rows back into itself.
-- NOTE(review): INSERT without an explicit column list -- tolerable here
-- only because source and target are the same table, so the column
-- order matches by construction.
insert into BULK_COLLECT_TEST
select * from BULK_COLLECT_TEST;
20000 rows created.
--BLOCK1:Using Loops
-- Baseline: fetch rows one at a time (one context switch per row).
-- Used only to time row-by-row fetching against BULK COLLECT below.
declare
  cursor c1 is
    select object_name from BULK_COLLECT_TEST;
  rec1 c1%rowtype;
begin
  open c1;
  loop
    fetch c1 into rec1;   -- single-row fetch: the slow path being measured
    exit when c1%notfound;
    null;                 -- placeholder for per-row processing
  end loop;
  close c1;               -- fix: cursor was opened but never closed (leak)
end;
Total elapsed time: 45 seconds.
--BLOCK2: Using Bulk Collecting
-- Bulk version: a single FETCH ... BULK COLLECT pulls every row into
-- the collection in one round trip, eliminating per-row context switches.
declare
  cursor c1 is
    select object_name from BULK_COLLECT_TEST;
  type c1_type is table of c1%rowtype;
  rec1 c1_type;
begin
  open c1;
  -- No LIMIT: the whole result set is loaded into memory at once,
  -- which is fine for timing but risky for very large tables.
  fetch c1 bulk collect into rec1;
  close c1;  -- fix: cursor was opened but never closed (leak)
end;
Total elapsed time: 5 seconds.
So bulk collecting the rows shows a huge performance improvement over fetching row by row.
When there are many rows to process, we can limit the number of rows per BULK COLLECT, process that batch, and then fetch again.
Otherwise, the session's memory use grows larger and larger as you fetch the rows.
--Bulk Collect Example using LIMIT :
-- Bulk collect in batches of 200 rows to cap memory use.
declare
  cursor c1 is
    select object_name from BULK_COLLECT_TEST;
  type c1_type is table of c1%rowtype;
  rec1 c1_type;
begin
  open c1;
  loop
    fetch c1 bulk collect into rec1 limit 200;
    -- Process the batch BEFORE testing %notfound: %notfound goes true
    -- on the final (possibly partial) batch, so exiting first would
    -- silently drop those last rows.
    for i in 1 .. rec1.count loop
      null;  -- placeholder for per-row processing
    end loop;
    exit when c1%notfound;
  end loop;
  close c1;  -- fix: cursor was opened but never closed (leak)
end;
Example:-
-----------
-- Demonstrates %NOTFOUND behavior with BULK COLLECT ... LIMIT:
-- the attribute becomes true on the final, possibly partial, batch,
-- so the batch must be reported/processed before exiting the loop.
declare
  type empno_tab is table of number index by binary_integer;
  l_empnos empno_tab;
  cursor c_emp is
    select empno from emp;
begin
  open c_emp;
  loop
    fetch c_emp bulk collect into l_empnos limit 10;

    if c_emp%notfound then
      -- Last batch: %NOTFOUND is already true, but the collection may
      -- still hold up to 9 rows that must not be skipped.
      dbms_output.put_line(
          'Cursor returned NOT FOUND but array has ' || l_empnos.count
          || ' left to process');
    else
      dbms_output.put_line(
          'We have ' || l_empnos.count
          || ' to process');
    end if;

    exit when c_emp%notfound;
  end loop;
  close c_emp;
end;
-- Set up a test table by copying all rows from an existing table.
-- NOTE(review): the PL/SQL blocks below select OBJECT_NAME from this
-- table, but PER_ALL_PEOPLE_F (an Oracle HRMS table) is unlikely to
-- have that column -- confirm the intended source (ALL_OBJECTS?).
create table BULK_COLLECT_TEST as select * from PER_ALL_PEOPLE_F;
Table created.
-- Double the row count by inserting the table's rows back into itself.
-- NOTE(review): INSERT without an explicit column list -- tolerable here
-- only because source and target are the same table, so the column
-- order matches by construction.
insert into BULK_COLLECT_TEST
select * from BULK_COLLECT_TEST;
20000 rows created.
--BLOCK1:Using Loops
-- Baseline: fetch rows one at a time (one context switch per row).
-- Used only to time row-by-row fetching against BULK COLLECT below.
declare
  cursor c1 is
    select object_name from BULK_COLLECT_TEST;
  rec1 c1%rowtype;
begin
  open c1;
  loop
    fetch c1 into rec1;   -- single-row fetch: the slow path being measured
    exit when c1%notfound;
    null;                 -- placeholder for per-row processing
  end loop;
  close c1;               -- fix: cursor was opened but never closed (leak)
end;
Total elapsed time: 45 seconds.
--BLOCK2: Using Bulk Collecting
-- Bulk version: a single FETCH ... BULK COLLECT pulls every row into
-- the collection in one round trip, eliminating per-row context switches.
declare
  cursor c1 is
    select object_name from BULK_COLLECT_TEST;
  type c1_type is table of c1%rowtype;
  rec1 c1_type;
begin
  open c1;
  -- No LIMIT: the whole result set is loaded into memory at once,
  -- which is fine for timing but risky for very large tables.
  fetch c1 bulk collect into rec1;
  close c1;  -- fix: cursor was opened but never closed (leak)
end;
Total elapsed time: 5 seconds.
So bulk collecting the rows shows a huge performance improvement over fetching row by row.
When there are many rows to process, we can limit the number of rows per BULK COLLECT, process that batch, and then fetch again.
Otherwise, the session's memory use grows larger and larger as you fetch the rows.
--Bulk Collect Example using LIMIT :
-- Bulk collect in batches of 200 rows to cap memory use.
declare
  cursor c1 is
    select object_name from BULK_COLLECT_TEST;
  type c1_type is table of c1%rowtype;
  rec1 c1_type;
begin
  open c1;
  loop
    fetch c1 bulk collect into rec1 limit 200;
    -- Process the batch BEFORE testing %notfound: %notfound goes true
    -- on the final (possibly partial) batch, so exiting first would
    -- silently drop those last rows.
    for i in 1 .. rec1.count loop
      null;  -- placeholder for per-row processing
    end loop;
    exit when c1%notfound;
  end loop;
  close c1;  -- fix: cursor was opened but never closed (leak)
end;
Example:-
-----------
-- Demonstrates %NOTFOUND behavior with BULK COLLECT ... LIMIT:
-- the attribute becomes true on the final, possibly partial, batch,
-- so the batch must be reported/processed before exiting the loop.
declare
  type empno_tab is table of number index by binary_integer;
  l_empnos empno_tab;
  cursor c_emp is
    select empno from emp;
begin
  open c_emp;
  loop
    fetch c_emp bulk collect into l_empnos limit 10;

    if c_emp%notfound then
      -- Last batch: %NOTFOUND is already true, but the collection may
      -- still hold up to 9 rows that must not be skipped.
      dbms_output.put_line(
          'Cursor returned NOT FOUND but array has ' || l_empnos.count
          || ' left to process');
    else
      dbms_output.put_line(
          'We have ' || l_empnos.count
          || ' to process');
    end if;

    exit when c_emp%notfound;
  end loop;
  close c_emp;
end;
No comments:
Post a Comment