I concur with briklen: there are a number of different ways of doing this depending on your preference. For me, Postgres has such nice built-in features — the ability to handle CSV, regular expressions, and json_build_object — that it seems rude not to use them. Example input CSV:
year,date,shares,trades,dollars
2010,"01/04/2010","1,425,504,460","4,628,115","$38,495,460,645"
2010,"01/05/2010","1,754,011,750","5,394,016","$43,932,043,406"
**/
-- Enable the file_fdw extension and register a foreign server that reads
-- local files through it. IF NOT EXISTS (extension: PG 9.1+, server: PG 9.6+)
-- makes this section safe to re-run.
create extension if not exists file_fdw;
create server if not exists pgcsv foreign data wrapper file_fdw;
-- Recreate the foreign table that maps the raw CSV file.
-- The numeric-looking columns are declared as text on purpose: the raw CSV
-- values carry thousands separators and a currency symbol (e.g. "1,425,504,460",
-- "$38,495,460,645"), so they are cleaned and cast downstream.
-- IF EXISTS keeps the first run from erroring (and matches the
-- "drop table if exists" style used later in this script).
drop foreign table if exists intest;
create foreign table intest(
    year int,
    date text,     -- MM/DD/YYYY as found in the file
    shares text,   -- digits with comma separators
    trades text,   -- digits with comma separators
    dollars text   -- leading '$' plus comma separators
) server pgcsv
options(filename '/DATA/dev/test.csv', format 'csv', header 'true');
-- Rebuild the JSON document table from the cleaned CSV rows.
-- The table is named factbookjsonb, so build jsonb (binary, indexable,
-- deduplicated keys) rather than plain json — the original used
-- json_build_object, which yields a json column despite the name.
-- Plain replace()/translate() replace the regexp calls: the patterns are
-- fixed single characters, so no regular expression is needed.
drop table if exists factbookjsonb;
create table factbookjsonb as
select
    year,
    jsonb_build_object(
        'date', date,
        'shares', replace(shares, ',', '')::bigint,
        'trades', replace(trades, ',', '')::bigint,
        -- translate strips both the '$' and every ',' in one pass
        'dollars', translate(dollars, '$,', '')::bigint
    ) as data
from intest;
-- Sanity check: name the columns explicitly (avoids SELECT *) and order the
-- output deterministically by year, then by the embedded date string.
select
    year,
    data
from factbookjsonb
order by year, data->>'date';