function get_File ( p_directory varchar2, p_file_name varchar2) return BLOB
is
  l_bf       BFILE;
  l_amount   INTEGER := 32767;
  l_position INTEGER := 1;
  buffer     RAW(32767);
  l_bb       BLOB;
begin
  l_bf := bfilename( p_directory , p_file_name );
  dbms_lob.open(l_bf, dbms_lob.lob_readonly);
  dbms_lob.createtemporary(l_bb, TRUE, dbms_lob.session);
  -- read the BFILE in 32K chunks and append each chunk to the temporary BLOB
  LOOP
    begin
      dbms_lob.read(l_bf, l_amount, l_position, buffer);
    EXCEPTION
      WHEN NO_DATA_FOUND THEN
        EXIT;  -- end of file reached
    end;
    dbms_lob.writeappend(l_bb, l_amount, buffer);
    l_position := l_position + l_amount;
  END LOOP;
  dbms_lob.close(l_bf);
  return l_bb;
end get_File;
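A minimal usage sketch (the directory object DOC_DIR and the file name test.pdf are just placeholders here):
declare
  l_blob BLOB;
begin
  -- DOC_DIR must be an existing Oracle directory object readable by the caller
  l_blob := get_File('DOC_DIR', 'test.pdf');
  dbms_output.put_line('loaded '||dbms_lob.getlength(l_blob)||' bytes');
end;
/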
Wednesday, July 20, 2016
get file
store passwords in ksh (Korn shell)
log(){
typeset rundate=$(date +"%F %X")
printf "${rundate} -- $*\n"
printf "${rundate} -- $*\n" >> $screen_log
}
do_sql(){
sql=$*
out=$(sqlplus -s '/ as sysdba' <<-EOF
whenever sqlerror exit 1
whenever oserror exit 1
set heading off feedback off serveroutput on trimout on pagesize 0
$sql
EOF
)
ret=$?
printf '%s' "$out"
return $ret
}
store_pwd(){
load_env $destdb
log "checking database open mode..."
sql="select status from v\$instance;"
log "running sql: $sql"
out=$(do_sql "$sql")
ret=$?
[[ $ret -ne 0 ]] && { log "*** failed to query database -- \n$out"; exit 1; }
log "got: ${out}"
typeset local_inst_status=$out
[[ $local_inst_status != "OPEN" ]] && { log "*** $destdb is not OPEN, not able to store passwords; skipping"; return 102; }
sql=$(cat <<-EOF
set echo off
set lines 1000
col lines for a1000
select u.name ||','|| u.spare4 ||','|| u.password ||','|| du.default_tablespace ||','|| du.profile||','||u.defrole||','||du.authentication_type||','||'"'||u.ext_username||'"' lines
from user$ u join dba_users du on (u.name=du.username) where u.name in ('SYS','SYSTEM','ORA_NCYCLE') ;
EOF
)
log "querying password hashes..."
out=$(do_sql "$sql")
ret=$?
[[ $ret -ne 0 ]] && { log "*** failed to query database -- \n$out"; return 1; }
touch $pwd_store
chmod 600 $pwd_store
echo "$out" > $pwd_store
return $?
}
restore_pwd(){
# return 102: file hashes are not there->try with cloakware
# return 1: error
load_env $destdb
log "checking dump file at $pwd_store"
[[ ! -e $pwd_store ]] && { log "dump file with hashes does not exist"; return 102; }
cnt=$(grep -E "SYS|SYSTEM|ORA_NCYCLE" $pwd_store | wc -l)
[[ $cnt -ne 3 ]] && { log "dump file does not contain rows for SYS|SYSTEM|ORA_NCYCLE"; return 102;}
while read line
do
usr=$(echo $line| awk -F, '{print $1}')
hash10g=$(echo $line| awk -F, '{print $3}')
hash11g=$(echo $line| awk -F, '{print $2}')
if [[ ! -z $hash11g ]];then
sql=$(cat <<-EOF
set feed on
alter user $usr identified by values '$hash11g;$hash10g';
EOF
)
elif [[ ! -z $hash10g ]];then
sql=$(cat <<-EOF
set feed on
alter user $usr identified by values '$hash10g';
EOF
)
else
log "password hash not found for user $usr"
return 102
fi
log "setting password for user $usr"
out=$(do_sql "$sql")
ret=$?
if [[ $ret -ne 0 && $out != *ORA-28007* ]]; then
log "*** failed to run sql -- \n$out"
return 1
elif [[ $ret -ne 0 && $out = *ORA-28007* ]]; then
log "password is already set"
else
log "password set"
fi
done < $pwd_store
rm $pwd_store
return 0
}
run sqlplus in perl
# hide passwords in connect strings and in "identified by [values]" clauses before logging
sub mask_pwd {
my $str = shift;
$str =~ s/conn(ect)?\s+(.*?)\/(.*?)([@\s])/connect $2\/(hidden)$4/ig;
$str =~ s/identified\s+by(\s+values)?\s+(.+?)(\s)/identified by$1 (hidden) $3/ig;
return $str;
}
# note: the switch/case syntax below needs the Switch module; xWrite_Log, xKSH_Call_Out_String
# and the xFile helpers are site-local routines
sub run_sql {
my $sql = shift;
my $error_handling = shift;
my $error_mode;
my $exec_sqlfile;
if (defined $error_handling) {
switch ($error_handling) {
case 'IGNORE_SQL_ERROR' {
$error_mode = "WHENEVER OSERROR exit 1; ";
}
case 'IGNORE_OS_ERROR' {
$error_mode = "WHENEVER SQLERROR exit SQL.SQLCODE; ";
}
case 'IGNORE_ALL_ERROR' {
$error_mode = "";
}
}
}
else { # default
$error_mode = "WHENEVER OSERROR exit 1; \nWHENEVER SQLERROR exit SQL.SQLCODE; ";
}
if ( $sql =~ /^\//) { # starting with '/' -> filename
$exec_sqlfile = $sql;
}
else {
&xOpen_File_Write( $sqlplusfile );
&xWrite_File( $sql );
&xClose_File;
$exec_sqlfile = $sqlplusfile;
}
my $str = qq{ . /cs/oracle/local/dba/bin/basenv.ksh; . /cs/oracle/local/dba/bin/oraenv.ksh $instance;
sqlplus -s \\"/ as sysdba\\" << EOF > $sqlpluslogfile 2>&1
${error_mode}
\@$exec_sqlfile
EOF
};
$cmd = &xKSH_Call_Out_String( $str );
&xWrite_Log("running:\n$error_mode\n".&mask_pwd($sql) );
my $log = qx( $cmd );
my $err = $?>>8;
chomp(my $sqllog = &xGobble( $sqlpluslogfile ));
if ($err) {
&xWrite_Log( &mask_pwd($sqllog) );
return (-1, $sqllog);
}
else {
if ( $sql !~ /feed off/ ) {
&xWrite_Log("got:\n".&mask_pwd($sqllog));
}
return (0, $sqllog) ;
}
}
dumping system, role and proxy grants for users
set long 10000000
set longchunksize 1000000
set pages 0 head off feed off echo off
set lines 500
col lines for a500 word_wrapped
exec dbms_metadata.set_transform_param(dbms_metadata.SESSION_TRANSFORM,'SQLTERMINATOR',TRUE);
select '-- SYSTEM GRANTS' from dual;
select dbms_metadata.get_granted_ddl('SYSTEM_GRANT',du.username) as lines from dba_users du where du.username in (${preserve_grant_quoted}) and exists (select grantee from dba_sys_privs where grantee=du.username);
select '-- ROLE GRANTS' from dual;
select dbms_metadata.get_granted_ddl('ROLE_GRANT',du.username) as lines from dba_users du where du.username in (${preserve_grant_quoted}) and exists (select grantee from dba_role_privs where grantee=du.username);
select '-- DEFAULT ROLES' from dual;
select dbms_metadata.get_granted_ddl('DEFAULT_ROLE',du.username) as lines from dba_users du where du.username in (${preserve_grant_quoted}) ;
select '-- PROXY GRANTS' from dual;
select dbms_metadata.get_granted_ddl('PROXY', pu.client) as lines from proxy_users pu where pu.client in (${preserve_grant_quoted}) ;
How to test a DB-Link as SYS
VARIABLE owner VARCHAR2(30)
VARIABLE link VARCHAR2(128)
set serveroutput on
set define off
set verify off
set feed off
DECLARE
cur_remote INTEGER;
lv_usr INTEGER;
lv_ignore INTEGER;
lv_db_name VARCHAR2(256);
lv_link_name VARCHAR2(128);
BEGIN
:owner:='$db_link->[0]';
:link:='$db_link->[1]';
FOR x IN ( select dbl.owner, dbl.db_link, dbl.username, dbl.host, du.user_id from
dba_db_links dbl left join dba_users du on (dbl.owner = du.username)
where dbl.owner=:owner and dbl.db_link=:link
) LOOP
cur_remote := sys.dbms_sys_sql.open_cursor();
IF ( x.owner != 'PUBLIC' ) THEN
dbms_sys_sql.parse_as_user(
cur_remote,
'select property_value db_name
from database_properties\@'||x.db_link||
' where property_name=''GLOBAL_DB_NAME''',
DBMS_SQL.NATIVE,
x.user_id);
ELSE
dbms_sys_sql.parse(
cur_remote,
'select property_value db_name
from database_properties\@'||x.db_link||
' where property_name=''GLOBAL_DB_NAME''',
DBMS_SQL.NATIVE);
END IF;
dbms_sys_sql.define_column(cur_remote, 1, lv_db_name,256);
lv_ignore := dbms_sys_sql.execute(cur_remote);
IF dbms_sys_sql.fetch_rows(cur_remote)>0 THEN
dbms_sys_sql.column_value(cur_remote, 1, lv_db_name);
END IF;
dbms_sys_sql.close_cursor(cur_remote);
-- dbms_output.put_line(x.db_link||','||sys_context('userenv','db_name')||','||x.owner||','||regexp_substr(lv_db_name,'[^.]+')||','||x.username);
sys.pk_compliance_sys.log_dblink_compliance (x.db_link, x.owner, regexp_substr(lv_db_name,'[^.]+'),
x.username, '', '' );
dbms_output.put_line('registered link '||x.db_link||' with target db: '||regexp_substr(lv_db_name,'[^.]+'));
END LOOP;
EXCEPTION
WHEN OTHERS THEN
IF dbms_sys_sql.is_open(cur_remote) THEN
dbms_sys_sql.CLOSE_CURSOR(cur_remote);
END IF;
RAISE;
END;
/
Tuesday, September 18, 2012
db migrations
checklist for database migrations (a few query sketches follow the list):
- check roles and the privileges granted to them
- check TEMP
- check UNDO (on RAC check UNDO for each instance)
- check SGA
- check optimizer parameters
- check all possible accounts (personal and app)
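A few query sketches for these checks, assuming the standard dictionary views; adapt the UNDO tablespace name pattern, and on RAC check the per-instance undo tablespaces (gv$ views):
-- TEMP and UNDO sizing
select tablespace_name, sum(bytes)/1024/1024 mb from dba_temp_files group by tablespace_name;
select tablespace_name, sum(bytes)/1024/1024 mb from dba_data_files where tablespace_name like 'UNDO%' group by tablespace_name;
-- SGA and optimizer parameters
select name, value from v$parameter where name in ('sga_target','sga_max_size') or name like 'optimizer%';
-- roles and the privileges granted to them
select grantee, granted_role from dba_role_privs order by grantee, granted_role;
-- all accounts (personal and app) and their status
select username, account_status, profile from dba_users order by username;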
Tuesday, February 21, 2012
Load SQL Baselines
1. Create a SQL Tuning set
2. Load the SQL into the set
3. Check the SQL Tuning set
4. Load the BASELINE from SQL Tuning set
5. Use EM Server->SQL Plan Control to manage the baselines
1. Create a SQL Tuning set
BEGIN
dbms_sqltune.create_sqlset(sqlset_name => 'EOD_856CDNTC4XDRG', description =>'Performance issue eod', sqlset_owner =>'SYS');
END;
/
2. Load the SQL into the set
DECLARE
sqlset_cur dbms_sqltune.sqlset_cursor;
bf VARCHAR2(37);
BEGIN
bf := 'UPPER(SQL_ID) = ''856CDNTC4XDRG'' ';
OPEN sqlset_cur FOR
SELECT VALUE(P) FROM TABLE(
dbms_sqltune.select_workload_repository( '7485', '7653', bf, NULL, NULL, NULL, NULL, 1, NULL, 'ALL')
) P;
dbms_sqltune.load_sqlset(
sqlset_name=>'EOD_856CDNTC4XDRG'
, populate_cursor=>sqlset_cur
, load_option => 'MERGE'
, update_option => 'ACCUMULATE'
, sqlset_owner=>'SYS');
END;
/
3. Check the SQL Tuning set
SELECT * FROM TABLE(DBMS_SQLTUNE.SELECT_SQLSET( 'EOD_856CDNTC4XDRG' ) );
4. Load the BASELINE from SQL Tuning set
set serveroutput on
DECLARE
l_plans_loaded PLS_INTEGER;
BEGIN
l_plans_loaded := DBMS_SPM.load_plans_from_sqlset(
sqlset_name => 'EOD_856CDNTC4XDRG'
,sqlset_owner => 'SYS');
dbms_output.put_line ('loaded '||l_plans_loaded);
END;
/
5. Use EM Server->SQL Plan Control to manage the baselines
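As an alternative to EM, the loaded baselines can also be inspected and switched on or off from SQL*Plus with DBMS_SPM; a minimal sketch, where the sql_handle and plan_name values are placeholders taken from dba_sql_plan_baselines:
select sql_handle, plan_name, enabled, accepted, origin from dba_sql_plan_baselines;

DECLARE
l_cnt PLS_INTEGER;
BEGIN
l_cnt := DBMS_SPM.alter_sql_plan_baseline(
sql_handle => 'SQL_1234567890abcdef'   -- placeholder handle
, plan_name => 'SQL_PLAN_example'      -- placeholder plan name
, attribute_name => 'ENABLED'
, attribute_value => 'NO');
dbms_output.put_line ('plans altered: '||l_cnt);
END;
/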
Tuesday, December 20, 2011
run data pump out of procedure (DBMS_DATAPUMP)
1. create directory
2. create user
3. create procedure
1. create directory
create directory clone_auto as '';
2. create user
create user dba_clone_user identified by test123;
grant connect, create database link, resource, create view to dba_clone_user;
grant unlimited tablespace to dba_clone_user;
grant exp_full_database to dba_clone_user;
grant read,write on directory clone_auto to dba_clone_user;
grant execute on dbms_flashback to dba_clone_user;
grant create table to dba_clone_user;
grant FLASHBACK ANY TABLE to dba_clone_user;
3. create procedure
CREATE OR REPLACE PROCEDURE dba_clone_user.start_export
IS
hdl_job NUMBER;
l_cur_scn NUMBER;
l_job_state VARCHAR2 (20);
l_status SYS.ku$_status1010;
l_job_status SYS.ku$_jobstatus1010;
BEGIN
begin
execute immediate 'drop table DBA_CLONE_USER.AUTO_EXPORT';
exception when others then null;
end;
hdl_job := DBMS_DATAPUMP.OPEN ( operation => 'EXPORT', job_mode => 'SCHEMA', job_name => 'AUTO_EXPORT');
dbms_datapump.set_parameter(hdl_job, 'FLASHBACK_SCN', dbms_flashback.get_system_change_number);
dbms_datapump.set_parameter(hdl_job, 'COMPRESSION', 'METADATA_ONLY');
dbms_datapump.metadata_filter (handle => hdl_job, name => 'SCHEMA_LIST', value => '''USER1''');
dbms_datapump.set_parallel(hdl_job, 5);
DBMS_DATAPUMP.add_file (handle => hdl_job,
filename => 'user1%U.dmp',
directory => 'CLONE_AUTO',
--filesize IN VARCHAR2 DEFAULT NULL,
filetype => DBMS_DATAPUMP.ku$_file_type_dump_file,
reusefile => 1
);
DBMS_DATAPUMP.add_file (handle => hdl_job,
filename => 'auto_export.log',
DIRECTORY => 'CLONE_AUTO',
--filesize IN VARCHAR2 DEFAULT NULL,
filetype => DBMS_DATAPUMP.ku$_file_type_log_file,
reusefile => 1
);
DBMS_DATAPUMP.start_job (handle => hdl_job);
DBMS_DATAPUMP.wait_for_job (handle => hdl_job, job_state => l_job_state);
DBMS_OUTPUT.put_line ('Job exited with status:' || l_job_state);
END;
/
Thursday, July 7, 2011
transfer stats from one schema to another
exporting from schema ORIG_USER and importing into MYUSER
1. create stattab
2. create table with list of tables:
3. export stats
4. delete stats on target schema
5. copy table with stats:
6. modify the MYUSER.STATS_FOR_TEST:
7. import stats
1. create stattab
exec DBMS_STATS.CREATE_STAT_TABLE(ownname=>'ORIG_USER',stattab=>'STATS_FOR_TEST');
2. create table with list of tables:
create table test_tabs(object_name varchar2(200));
insert into test_tabs (select ...)
3. export stats
select 'exec dbms_stats.export_table_stats(ownname=>''ORIG_USER'',tabname=>'''|| object_name ||''',stattab=>''STATS_FOR_TEST'');' cmd from (select distinct object_name from test_tabs) t;
4. delete stats on target schema
select 'exec dbms_stats.delete_table_stats(ownname=>''MYUSER'',tabname=>'''|| object_name ||''',force=>true);' cmd from (select distinct object_name from test_tabs) t;
5. copy table with stats:
create table myuser.STATS_FOR_TEST as (select * from orig_user.STATS_FOR_TEST);
6. modify the MYUSER.STATS_FOR_TEST:
update stats_for_test set c5='MYUSER';
update stats_for_test set ch1 = ora_hash(ch1) where ch1 is not null; -- mask histogram values
7. import stats
select 'exec dbms_stats.import_table_stats(ownname=>''MYUSER'',tabname=>'''|| object_name ||''',stattab=>''STATS_FOR_TEST'',force=>true);' cmd from (select distinct object_name from test_tabs) t;
Thursday, March 10, 2011
STATS Locked after import
Metalink: ORA-38029 "Object Statistics Are Locked" - Possible Causes [ID 433240.1]
To prevent import (imp) from locking the table's statistics when importing a table without the rows (rows=n), use statistics=none. To prevent data pump import (impdp) from locking the table's statistics when importing a table without the rows (content=metadata_only), use exclude=(table_statistics,index_statistics).
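If an import has already left the statistics locked (ORA-38029), a quick sketch to find and unlock them; MYUSER and MY_TABLE are placeholders:
select owner, table_name, stattype_locked from dba_tab_statistics where owner='MYUSER' and stattype_locked is not null;
exec dbms_stats.unlock_table_stats(ownname=>'MYUSER', tabname=>'MY_TABLE');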