Of Interest
Be responsible for your future Enter the USA legally!
Visa, Green Card, Citizenship, Passport Consultation Services
|
Oracle
»
UNIX
»
Commands
»
SQLLDR
SQL Loader
Use this high-performance utility for table-based imports.
Drop the table's indexes before the load, and rebuild them afterwards.
Drawback with this utility: you'll need to build a control file for
each table, not a trivial process.
# command sample: uses "direct path" which bypasses redo
sqlldr userid=scott/tiger data=/apps/temp/customer.dat control=customer.ctl direct=true
# list parameters
sqlldr help=Y
userid ORACLE username/password
control Control file name
log Log file name
bad Bad file name
data Data file name
discard Discard file name
discardmax Number of discards to allow
skip Number of logical records to skip
load Number of logical records to load
errors Number of errors to allow
rows Number of rows in conventional path bind array or between direct path data saves
bindsize Size of conventional path bind array in bytes
silent Suppress messages during run (header,feedback,errors,discards,partitions)
direct use direct path
_synchro internal testing
parfile parameter file: name of file that contains parameter specifications
parallel do parallel load
file File to allocate extents from
skip_unusable_indexes disallow/allow unusable indexes or index partitions
skip_index_maintenance do not maintain indexes, mark affected indexes as unusable
commit_discontinued commit loaded rows when load is discontinued
_display_exitcode Display exit code for SQL*Loader execution
readsize Size of Read buffer
Using SQL Loader
1) create a control file:
-- SQL*Loader control file: load tab-delimited rows from places.txt
-- into the PLACES table. INSERT mode requires the target table to be empty.
LOAD DATA
INFILE 'places.txt'
INTO TABLE places
INSERT
-- X'09' = ASCII TAB delimiter; TRAILING NULLCOLS sets any missing
-- trailing fields to NULL instead of rejecting the record.
FIELDS TERMINATED BY X'09' TRAILING NULLCOLS
(
loc_id,
name,
city,
phone
)
2) make sure the destination table is empty
3) run the command, as the oracle user:
sqlldr userid=user99/tiger control=places.ctl direct=true skip_index_maintenance=true readsize=131070
This command runs SQL*Loader using 'direct path' and skips index maintenance —
the affected indexes are marked unusable and must be rebuilt afterwards, followed by re-gathering statistics (ANALYZE).
Direct path loading bypasses redo generation and most constraint checking; blocks are written directly above the high-water mark instead of through conventional INSERTs and commits.
<h4>Sample control, importing Sybase dates</h4>
-- SQL*Loader control file: load tab-delimited COMPANY rows exported
-- from Sybase. INSERT mode requires the target table to be empty.
LOAD DATA
INFILE 'company.dat'
INTO TABLE company
INSERT
-- X'09' = ASCII TAB delimiter; TRAILING NULLCOLS nulls missing trailing fields.
FIELDS TERMINATED BY X'09' TRAILING NULLCOLS
(
COMPANY_ID ,
COUNTRY_ID ,
INDUSTRY_ID ,
-- date mask "mon dd yyyy hh:miam" matches Sybase's default datetime
-- output format (e.g. "Jan 05 2003 11:30AM")
INSERT_DT date "mon dd yyyy hh:miam" terminated by X'09',
MOD_DT date "mon dd yyyy hh:miam" terminated by X'09',
ENDING_EFF_DT date "mon dd yyyy hh:miam" terminated by X'09',
TRADE_COUNTRY_ID ,
COMPANY_NAME ,
DATA_ID )
<h4>Exporting to a delimited file</h4>
Note: BLOB, LONG RAW, and LONG column types are not exportable using this technique.
Option 1: Select as file format
-- SQL*Plus: spool a comma-delimited extract of TABLENAME to file2.dat.
-- feedback off / pages 0 suppress headings and row-count noise so the
-- spool file contains only data lines.
set feedback off
set pages 0
spool file2.dat
select col_a||','||col_b||','||to_char(col_c) from tablename ;
spool off
Option 2: Use CTL file maker and SQL Loader
Install the three scripts detailed below, then run as follows:
(sample calls)
# export all tables, to ctl files
exp2ctl scott/tiger /export/home/dba/temp
cd /export/home/dba/temp
# import using sql loader, to different schema ( or instance! )
ctl2ora steve/lion
'sqlldr_exp' script from Oracle site (author: T.Kyte).
This script is called from the following two scripts.
Note: left-justify every line when pasting this script — the here-document terminators (EOF) must start in column 1 or the shell will not find them.
#!/usr/bin/ksh
# Script: sqlldr_exp
# export single oracle table to a ctl file,
# for use with sqlldr
# Show usage and quit when no arguments were supplied.
if [ "$1" = "" ]
then
cat << EOF
usage:sqlldr_exp un/pw [tables|views]
example: sqlldr_exp scott/tiger mytable
description: Select over standard out all rows of table or view with
columns delimited by pipes.
EOF
exit
fi
# PW    = Oracle connect string (user/password)
# tname = name used for the output $tname.ctl file
# NOTE(review): only ONE shift is done, so $* still begins with $2 (tname);
# when several table arguments are given, every iteration writes to the
# same $tname.ctl file, overwriting the previous one — confirm intended.
PW=$1
tname=$2
shift
# One pass per table/view name remaining on the command line.
for X in $*
do
# Pass 1: have SQL*Plus generate a second script into /tmp/flat$$.sql.
# The "prompt prompt ..." lines make that script emit literal LOAD DATA
# header lines; the selects against user_tab_columns emit the column
# list and a pipe- (chr(124)-) delimited SELECT over the table.
sqlplus -s $PW << EOF > /tmp/flat$$.sql
set wrap off
set feedback off
set pagesize 0
set verify off
prompt prompt LOAD DATA
prompt prompt INFILE *
prompt prompt INTO TABLE $X
prompt prompt TRUNCATE
prompt prompt FIELDS TERMINATED BY '|'
prompt prompt (
select 'prompt ' || decode(column_id,1,'',',') || lower(column_name)
from user_tab_columns
where table_name = upper('$X')
/
prompt prompt )
prompt prompt BEGINDATA
prompt select
select lower(column_name)||'||chr(124)||'
from user_tab_columns
where table_name = upper('$X') and
column_id != (select max(column_id) from user_tab_columns where
table_name = upper('$X'))
/
select lower(column_name)
from user_tab_columns
where table_name = upper('$X') and
column_id = (select max(column_id) from user_tab_columns where
table_name = upper('$X'))
/
prompt from $X
prompt /
prompt exit
exit
EOF
# Pass 2: run the generated script; trimspool/linesize keep each data
# record on one untruncated line. The combined output IS the control
# file (INFILE * means the data follows BEGINDATA inline).
sqlplus -s $PW << EOF > $tname.ctl
set wrap off
set feedback off
set pagesize 0
set verify off
set trimspool on
set linesize 5000
start /tmp/flat$$.sql
exit
EOF
# Clean up the temporary generated script.
rm /tmp/flat$$.sql
done
Bonus Script I: Create control files for each table
in a schema.
#!/usr/bin/ksh
#==========================================
# Script: exp2ctl
# Export all tables to control files,
# for sqlldr import
#
# Parms: userid/pwd
#        output directory
#
# ** Check location of sqlldr_exp script, modify call as needed (expcmd)
#==========================================
# Fail early when either parameter is missing (previously an absent
# directory argument reached "test -d" as an empty string).
if [ $# -lt 2 ] ; then
echo "usage: $0 userid/pwd output_directory"
exit 1
fi
orauser=$1
dir1=$2
# Quoting fix: unquoted expansions word-split on paths with spaces;
# also exit with a failure status instead of plain "exit".
if ! test -d "$dir1" ; then
echo "Directory not found: $dir1"
exit 1
fi
shfile=$dir1/ctlmake.sh      # generated driver script (one export call per table)
awkfile=$dir1/ctlmake.awk    # awk program that builds those calls
tfile=$dir1/tnames.dat       # table-name list extracted from the schema
expcmd=sqlldr_exp            # exporter script; adjust if not on PATH
echo "Using schema ................ $orauser"
echo "Using output directory ...... $dir1"
sleep 2
echo "Creating table list ..."
# NOTE(review): user_tables.sql must exist in the current directory;
# egrep keeps only lines containing uppercase letters (table names).
sqlplus -s "$orauser" @user_tables.sql | egrep "[A-Z]" > "$tfile"
sleep 2
echo "Creating script ..."
echo "cd $dir1" > "$shfile"
echo "set -x" >> "$shfile"
echo "{print \"$expcmd\" \" $orauser \" \$1 }" > "$awkfile"
# Consistency fix: use the $awkfile/$tfile variables defined above
# (the original re-spelled the paths) and drop the useless "cat | awk".
awk -f "$awkfile" "$tfile" >> "$shfile"
chmod 700 "$shfile"
sleep 2
cat "$shfile"
sleep 3
echo "Running ctl make ..."
"$shfile"
Bonus Script II: Run in all ctl files, in the current directory
( file must end in .ctl )
#!/bin/ksh
#-----------------------------
# Script: ctl2ora
# Control File Processor
# for SQL Loader
# Parms: userid/pwd
#-----------------------------
# Require the Oracle connect string; exit non-zero on usage error
# (the original plain "exit" returned success).
if test $# -lt 1 ; then
echo " "
echo "usage: "
echo "$0 userid/pwd"
echo " "
exit 1
fi
# Run every .ctl file in the current directory through SQL*Loader.
# The "test -f" guard skips the literal '*.ctl' left by a non-matching glob.
# Quoting fix: $fname and $1 are quoted so filenames/credentials containing
# spaces or shell metacharacters are passed intact.
for fname in *.ctl ; do
echo "sqlldr: $fname"
if test -f "$fname" ; then
# direct=true bypasses redo; readsize enlarges the read buffer
sqlldr userid="$1" control="$fname" direct=true readsize=131070
## Faster version! (marks affected indexes unusable; rebuild afterwards)
## sqlldr userid="$1" control="$fname" direct=true skip_index_maintenance=true readsize=131070
fi
done
<h4>Using PL-SQL to Export Data to a File</h4>
--
-- proc_exp_cad: export up to RCOUNT rows (company_id, industry_id)
-- from COMPANY_ANNUAL_DATA2 to the double-pipe ('||') delimited file
-- /apps/temp/cad.out via UTL_FILE.
--
-- Note: LONG RAW / LONG columns are not exportable using this technique.
--
create or replace procedure proc_exp_cad (rcount in integer) is
count1 integer default 0 ;        -- rows processed so far
file1 utl_file.file_type;         -- output file handle
company_id1 integer;
industry_id1 integer;
line1 varchar2(1000);             -- assembled output record
cursor cur1 is
select company_id, industry_id
from company_annual_data2 ;
begin
-- '/apps/temp' must be an accessible UTL_FILE location
-- (UTL_FILE_DIR or a DIRECTORY object mapping)
file1 := utl_file.fopen('/apps/temp','cad.out','w',32000);
open cur1;
loop
count1 := count1 + 1 ;
fetch cur1 into company_id1, industry_id1 ;
exit when cur1%NOTFOUND ;
exit when count1 > rcount ;
line1 := to_char(company_id1) || '||' ||
to_char(industry_id1) ;
utl_file.put_line(file1,line1);
end loop ;
close cur1 ;       -- bug fix: cursor was opened but never closed
utl_file.fclose(file1);
exception
when others then
-- bug fix: release the cursor and the file handle on any error,
-- then re-raise so the caller still sees the failure
if cur1%isopen then
close cur1 ;
end if ;
if utl_file.is_open(file1) then
utl_file.fclose(file1);
end if ;
raise ;
end ;
/
-- report compilation problems right after the CREATE (originally SHOW ERRORS
-- came after the CALL, and a stray trailing "/" re-executed the CALL twice)
show errors
call proc_exp_cad(30)
/
|
|
|
Get the latest Rocket99 news and tech tips via
|