On Thu, Mar 6, 2014 at 10:15 PM, Kohei KaiGai <kai...@kaigai.gr.jp> wrote:
> 2014-03-06 18:17 GMT+09:00 Haribabu Kommi <kommi.harib...@gmail.com>:
>> I will update you later regarding the performance test results.
>>

I ran the performance test on the cache scan patch and below are the readings.

Configuration:

shared_buffers - 512MB
cache_scan.num_blocks - 600
checkpoint_segments - 255

Machine:
OS - CentOS 6.4
CPU - 4 cores, 2.5 GHz
Memory - 4GB

                   Head        Patched      Diff
Select - 500K      772ms       2659ms       -200%
Insert - 400K      3429ms      1948ms       43%   (I am not sure how it improved in this case)
Delete - 200K      2066ms      3978ms       -92%
Update - 200K      3915ms      5899ms       -50%
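
(A negative Diff means the patched run is slower than Head; for example, the delete case works out to (2066 - 3978) / 2066 ≈ -92%.)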

This patch shows how the custom scan interface can be used very well, but
the patch itself has some performance problems that need to be
investigated.

I attached the test script file used for the performance test.
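
One simple first check, reusing the "test" table and the cache_scan settings
from the attached script, would be to compare buffer usage for the same query
with the cache scan on and off:

-- with the cache scan enabled
explain (analyze, buffers) select count(*) from test where f1 > 0;
-- and again with it disabled, for comparison
set cache_scan.enabled = off;
explain (analyze, buffers) select count(*) from test where f1 > 0;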

Regards,
Hari Babu
Fujitsu Australia
shared_buffers = 512MB
shared_preload_libraries = 'cache_scan'
cache_scan.num_blocks = 600
checkpoint_segments = 255

\timing

--cache scan select 5 million
create table test(f1 int, f2 char(70), f3 float, f4 char(100));

truncate table test;
insert into test values (generate_series(1,5000000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');
checkpoint;

CREATE TRIGGER test_cache_row_sync AFTER INSERT OR UPDATE OR DELETE ON test FOR ROW EXECUTE PROCEDURE cache_scan_synchronizer();
CREATE TRIGGER test_cache_stmt_sync AFTER TRUNCATE ON test FOR STATEMENT EXECUTE PROCEDURE cache_scan_synchronizer();

vacuum analyze test;
explain select count(*) from test where f1 > 0;
select count(*) from test where f1 > 0;
select count(*) from test where f1 > 0; -- run a second time; this one should be served from the cache

delete from test where f1 > 1000000 and f1 <= 1200000;

update test set f1 = f1 + 3000000 where f1 > 1200000 and f1 <= 1400000;

insert into test values (generate_series(1000001, 1400000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');

drop table test cascade;

--sequence scan select 5 million
create table test(f1 int, f2 char(70), f3 float, f4 char(100));

truncate table test;
insert into test values (generate_series(1,5000000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');
checkpoint;

vacuum analyze test;
set cache_scan.enabled = off;

explain select count(*) from test where f1 > 0;
select count(*) from test where f1 > 0;

delete from test where f1 > 1000000 and f1 <= 1200000;

update test set f1 = f1 + 3000000 where f1 > 1200000 and f1 <= 1400000;

insert into test values (generate_series(1000001, 1400000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');

drop table test cascade;


--cache scan select 500K
create table test(f1 int, f2 char(70), f3 float, f4 char(100));

truncate table test;
insert into test values (generate_series(1,5000000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');
checkpoint;

CREATE TRIGGER test_cache_row_sync AFTER INSERT OR UPDATE OR DELETE ON test FOR ROW EXECUTE PROCEDURE cache_scan_synchronizer();
CREATE TRIGGER test_cache_stmt_sync AFTER TRUNCATE ON test FOR STATEMENT EXECUTE PROCEDURE cache_scan_synchronizer();

vacuum analyze test;
explain select count(*) from test where f1 % 10 = 5;
select count(*) from test where f1 % 10 = 5;
select count(*) from test where f1 % 10 = 5;

delete from test where f1 > 1000000 and f1 <= 1200000;

update test set f1 = f1 + 3000000 where f1 > 1200000 and f1 <= 1400000;

insert into test values (generate_series(1000001, 1400000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');

drop table test cascade;

--sequence scan select 500K
create table test(f1 int, f2 char(70), f3 float, f4 char(100));

truncate table test;
insert into test values (generate_series(1,5000000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');
checkpoint;

vacuum analyze test;
set cache_scan.enabled = off;

explain select count(*) from test where f1 % 10 = 5;
select count(*) from test where f1 % 10 = 5;

delete from test where f1 > 1000000 and f1 <= 1200000;

update test set f1 = f1 + 3000000 where f1 > 1200000 and f1 <= 1400000;

insert into test values (generate_series(1000001, 1400000), 'fujitsu', 1.1, 'Australia software tech pvt ltd');

drop table test cascade;