Tuesday, September 18, 2012

db migrations

checklist for database migrations:

  • check roles and permissions to roles
  • check TEMP
  • check UNDO (on RAC check UNDO for each instance)
  • check SGA
  • check optimizer parameters
  • check all possible accounts (personal and app)

Tuesday, February 21, 2012

Load SQL Baselines

1. Create a SQL Tuning set
-- Create an empty SQL Tuning Set, owned by SYS, to hold the problem statement.
BEGIN
   dbms_sqltune.create_sqlset (sqlset_name  => 'EOD_856CDNTC4XDRG',
                               description  => 'Performance issue eod',
                               sqlset_owner => 'SYS');
END;
/

2. Load the SQL into the set
DECLARE
   l_filter    VARCHAR2 (37);                 -- basic filter applied to the AWR workload
   l_workload  dbms_sqltune.sqlset_cursor;    -- cursor over the matching statements
BEGIN
   -- Select only the one statement of interest from the snapshot range.
   l_filter := 'UPPER(SQL_ID) = ''856CDNTC4XDRG'' ';

   -- Pull the statement (with plans and statistics) out of the AWR
   -- between snapshots 7485 and 7653.
   OPEN l_workload FOR
      SELECT VALUE (p)
        FROM TABLE (dbms_sqltune.select_workload_repository (
                       '7485', '7653', l_filter,
                       NULL, NULL, NULL, NULL, 1, NULL, 'ALL')) p;

   -- MERGE + ACCUMULATE makes the load re-runnable without duplicating rows.
   dbms_sqltune.load_sqlset (sqlset_name     => 'EOD_856CDNTC4XDRG',
                             populate_cursor => l_workload,
                             load_option     => 'MERGE',
                             update_option   => 'ACCUMULATE',
                             sqlset_owner    => 'SYS');
END;
/


3. Check the SQL Tuning set
-- Sanity check: list the statements captured in the tuning set before loading baselines.
SELECT * FROM TABLE(DBMS_SQLTUNE.SELECT_SQLSET( 'EOD_856CDNTC4XDRG' ) );

4. Load the BASELINE from SQL Tuning set
set serveroutput on
DECLARE
   l_count PLS_INTEGER;   -- how many plans were accepted into the baseline
BEGIN
   -- Promote every plan captured in the tuning set to a SQL plan baseline.
   l_count := DBMS_SPM.load_plans_from_sqlset (
                 sqlset_name  => 'EOD_856CDNTC4XDRG',
                 sqlset_owner => 'SYS');

   dbms_output.put_line ('loaded ' || l_count);
END;
/

5. Use EM Server->SQL Plan Control to manage the baselines

Tuesday, December 20, 2011

run data pump out of procedure (DBMS_DATAPUMP)

1. create directory
-- NOTE(review): the path literal is intentionally left empty in this template —
-- fill in the OS directory that will hold the dump and log files before running.
create directory clone_auto as '';
2. create user
-- Dedicated account that owns and runs the automated export job.
create user dba_clone_user identified by test123;
-- Basic privileges for connecting and creating the supporting objects.
grant connect, create database link, resource, create view to dba_clone_user;
grant unlimited tablespace to dba_clone_user;
-- Full export privilege for the schema-mode Data Pump job.
grant exp_full_database to dba_clone_user;
-- Access to the dump/log directory created above.
grant read,write on directory clone_auto to dba_clone_user;
-- Needed by the procedure to fetch the current SCN for a consistent export.
grant execute on dbms_flashback to dba_clone_user;
-- Data Pump creates its master table (AUTO_EXPORT) in this schema.
grant create table to dba_clone_user;
grant FLASHBACK ANY TABLE to dba_clone_user;


3. create procedure
CREATE OR REPLACE PROCEDURE dba_clone_user.start_export
IS
   -- Runs a consistent (FLASHBACK_SCN) schema-mode Data Pump export of USER1
   -- into directory CLONE_AUTO, producing user1%U.dmp files and auto_export.log,
   -- and blocks until the job finishes.
   hdl_job           NUMBER;          -- Data Pump job handle
   l_job_state       VARCHAR2 (20);   -- final state reported by WAIT_FOR_JOB

   -- ORA-00942: table or view does not exist — the only error we expect
   -- (and tolerate) when dropping the leftover master table.
   e_table_missing   EXCEPTION;
   PRAGMA EXCEPTION_INIT (e_table_missing, -942);
BEGIN
   -- Drop the Data Pump master table left by a previous run, if any;
   -- otherwise OPEN fails because the job name AUTO_EXPORT is already in use.
   BEGIN
      EXECUTE IMMEDIATE 'drop table  DBA_CLONE_USER.AUTO_EXPORT';
   EXCEPTION
      WHEN e_table_missing THEN
         NULL;   -- first run: nothing to drop
   END;

   hdl_job := DBMS_DATAPUMP.OPEN ( operation => 'EXPORT', job_mode => 'SCHEMA', job_name => 'AUTO_EXPORT');

   -- Export a consistent image as of "now" (equivalent to expdp FLASHBACK_SCN).
   dbms_datapump.set_parameter(hdl_job, 'FLASHBACK_SCN', dbms_flashback.get_system_change_number);
   dbms_datapump.set_parameter(hdl_job, 'COMPRESSION', 'METADATA_ONLY');
   -- Restrict the export to the USER1 schema; the value must be a quoted list.
   dbms_datapump.metadata_filter (handle => hdl_job, name => 'SCHEMA_LIST', value => '''USER1''');
   dbms_datapump.set_parallel(hdl_job, 5);

   -- %U expands to 01..99 so the parallel workers can write separate dump files;
   -- reusefile => 1 overwrites files left over from a previous run.
   DBMS_DATAPUMP.add_file (handle    => hdl_job,
                           filename  => 'user1%U.dmp',
                           directory => 'CLONE_AUTO',
                           filetype  => DBMS_DATAPUMP.ku$_file_type_dump_file,
                           reusefile => 1);

   DBMS_DATAPUMP.add_file (handle    => hdl_job,
                           filename  => 'auto_export.log',
                           directory => 'CLONE_AUTO',
                           filetype  => DBMS_DATAPUMP.ku$_file_type_log_file,
                           reusefile => 1);

   DBMS_DATAPUMP.start_job (handle => hdl_job);

   -- Block until the job completes so the final state can be reported.
   DBMS_DATAPUMP.wait_for_job (handle => hdl_job, job_state => l_job_state);

   DBMS_OUTPUT.put_line ('Job exited with status:' || l_job_state);

END;
/

Thursday, July 7, 2011

transfer stats from one schema to another

exporting from schema ORIG_USER and importing into MYUSER

1. create stattab: DBMS_STATS.CREATE_STAT_TABLE(ownname=>'ORIG_USER',stattab=>'STATS_FOR_TEST');
2. create table with list of tables: create table test_tabs(object_name varchar2(200));
insert into test_tabs (select ...)

3. export stats: select 'exec dbms_stats.export_table_stats(ownname=>''ORIG_USER'',tabname=>'''|| object_name ||''',stattab=>''STATS_FOR_TEST'');' cmd from (select distinct object_name from test_tabs) t;
4. delete stats on target schema: select 'exec dbms_stats.import_table_stats(ownname=>''MYUSER'',tabname=>'''|| object_name ||''',force=>true);' cmd from (select distinct object_name from test_tabs) t;
5. copy table with stats: create table myuser.STATS_FOR_TEST as (select * from orig_user.STATS_FOR_TEST);
6. modify the MYUSER.STATS_FOR_TEST: update stats_for_test set c5='MYUSER';
update stats_for_test set ch1 = ora_hash(ch1) where ch1 is not null; <== to mask histogram values

7. import stats: select 'exec dbms_stats.import_table_stats(ownname=>''MYUSER'',tabname=>'''|| object_name ||''',stattab=>''STATS_FOR_TEST'',force=>true);' cmd from (select distinct object_name from test_tabs) t;

Thursday, March 10, 2011

STATS Locked after import

Metalink: ORA-38029 "Object Statistics Are Locked" - Possible Causes [ID 433240.1]

To prevent import (imp) from locking the table's statistics when importing a table without the rows (rows=n), use statistics=none. To prevent data pump import (impdp) from locking the table's statistics when importing a table without the rows (content=metadata_only), use exclude=(table_statistics,index_statistics).

Friday, November 5, 2010

GRANT OPTION

if a user has the GRANT OPTION he can pass this privilege on to other users through a view, without granting the privilege on the original source view:
SQL> grant select on dba_users to dbatest with grant option;
SQL> conn dbatest
SQL> create view da_users as select * from sys.dba_users;
SQL> grant select on da_users to dbatest2;
SQL> conn dbatest2
SQL> select count(*) from dbatest.da_users;

COUNT(*)
----------
40

SQL> select count(*) from sys.dba_users;
select count(*) from sys.dba_users
*
ERROR at line 1:
ORA-00942: table or view does not exist

Monday, October 25, 2010

DG broker connections

set event = '16634 trace name context forever, level 1' and restart the Data Guard Broker process. This will record all connect strings used by the Data Guard Broker in the drc*.log files, so you can see which exact connect string is actually being used by the Data Guard Broker.

metalink: [ID 745201.1]

to start and stop the DG broker: ALTER SYSTEM SET DG_BROKER_START=[TRUE|FALSE];

Wednesday, September 15, 2010

SQL trace file interpretation

http://wiki.oracle.com/page/Structure+of+an+Extended+SQL+Trace+file


XCTEND rlbk=%d rd_only=%d
XCTEND A transaction end marker.

rlbk 1 if a rollback was performed, 0 if no rollback (commit).
rd_only 1 if transaction was read only, 0 if changes occurred.

Monday, June 14, 2010

prefetch

Execution plan looking like this:

---------------------------------------------------------------------------
0 SELECT STATEMENT Optimizer=ALL_ROWS (Cost=322 Card=319 Bytes=11803)
1 0 TABLE ACCESS (BY INDEX ROWID) OF 'TARGET' (Cost=2 Card=1 Bytes=29)
2 1 NESTED LOOPS (Cost=322 Card=319 Bytes=11803)
3 2 TABLE ACCESS (FULL) OF 'DRIVER' (Cost=3 Card=319 Bytes=2552)
4 2 INDEX (RANGE SCAN) OF 'T_PK' (UNIQUE) (Cost=1 Card=1)


is using a feature called 'prefetch'.

More about it:
http://www.oracle.com/technology/products/oracle9i/daily/dec17.html
http://forums.oracle.com/forums/message.jspa?messageID=4236898

Thursday, September 24, 2009

to enable the CBO trace

-- Tag the trace file name so it is easy to spot in the trace directory.
alter session set tracefile_identifier='_10053';
-- Turn the optimizer (10053) trace on ...
alter session set events '10053 trace name context forever, level 1';
-- ... run the statement to be traced, then turn the trace off.
alter session set events '10053 trace name context off';


http://www.psoug.org/reference/trace_tkprof.html