Tuesday, December 20, 2011

run data pump out of procedure (DBMS_DATAPUMP)

1. create directory
create directory clone_auto as '/path/to/dump/dir';  -- set to the OS directory where dump/log files should be written
2. create user
create user dba_clone_user identified by test123;
grant connect, create database link, resource, create view to dba_clone_user;
grant unlimited tablespace to dba_clone_user;
grant exp_full_database to dba_clone_user;
grant read,write on directory clone_auto to dba_clone_user;
grant execute on dbms_flashback to dba_clone_user;
grant create table to dba_clone_user;
grant FLASHBACK ANY TABLE to dba_clone_user;


3. create procedure
CREATE OR REPLACE PROCEDURE dba_clone_user.start_export
IS
   -- Exports schema USER1 with DBMS_DATAPUMP, consistent as of the current SCN.
   -- Dump files (user1%U.dmp) and the log (auto_export.log) go to directory
   -- object CLONE_AUTO and are overwritten on each run (reusefile => 1).
   hdl_job       NUMBER;          -- Data Pump job handle from OPEN
   l_job_state   VARCHAR2 (20);   -- final job state reported by WAIT_FOR_JOB
BEGIN
   -- Drop the leftover master table from a previous run; without this,
   -- DBMS_DATAPUMP.OPEN fails because the job name AUTO_EXPORT already exists.
   BEGIN
      EXECUTE IMMEDIATE 'drop table  DBA_CLONE_USER.AUTO_EXPORT';
   EXCEPTION
      WHEN OTHERS THEN
         NULL;   -- best effort: the table usually just does not exist yet
   END;

   hdl_job := DBMS_DATAPUMP.open (operation => 'EXPORT',
                                  job_mode  => 'SCHEMA',
                                  job_name  => 'AUTO_EXPORT');

   -- Make the export consistent as of "now" (needs FLASHBACK ANY TABLE,
   -- granted during setup above).
   DBMS_DATAPUMP.set_parameter (hdl_job,
                                'FLASHBACK_SCN',
                                DBMS_FLASHBACK.get_system_change_number);
   DBMS_DATAPUMP.set_parameter (hdl_job, 'COMPRESSION', 'METADATA_ONLY');

   -- Restrict the job to schema USER1.  The doubled quotes produce the
   -- quoted literal 'USER1' inside the filter expression.
   DBMS_DATAPUMP.metadata_filter (handle => hdl_job,
                                  name   => 'SCHEMA_LIST',
                                  value  => '''USER1''');
   DBMS_DATAPUMP.set_parallel (hdl_job, 5);

   -- %U expands to a two-digit file sequence so each of the 5 parallel
   -- workers writes its own dump file.
   DBMS_DATAPUMP.add_file (handle    => hdl_job,
                           filename  => 'user1%U.dmp',
                           directory => 'CLONE_AUTO',
                           filetype  => DBMS_DATAPUMP.ku$_file_type_dump_file,
                           reusefile => 1);

   DBMS_DATAPUMP.add_file (handle    => hdl_job,
                           filename  => 'auto_export.log',
                           directory => 'CLONE_AUTO',
                           filetype  => DBMS_DATAPUMP.ku$_file_type_log_file,
                           reusefile => 1);

   DBMS_DATAPUMP.start_job (handle => hdl_job);

   -- Block until the job finishes (or fails) and report its final state.
   DBMS_DATAPUMP.wait_for_job (handle => hdl_job, job_state => l_job_state);

   DBMS_OUTPUT.put_line ('Job exited with status:' || l_job_state);
EXCEPTION
   WHEN OTHERS THEN
      -- Stop the job so a failed run does not leave an orphaned Data Pump
      -- job (and its master table) behind, then re-raise the original error.
      BEGIN
         DBMS_DATAPUMP.stop_job (hdl_job);
      EXCEPTION
         WHEN OTHERS THEN
            NULL;   -- handle may already be invalid; nothing more to clean up
      END;

      RAISE;
END;
/

Thursday, July 7, 2011

transfer stats from one schema to another

exporting from schema ORIG_USER and importing into MYUSER

1. create stattab DBMS_STATS.CREATE_STAT_TABLE(ownname=>'ORIG_USER',stattab=>'STATS_FOR_TEST');
2. create table with list of tables: create table test_tabs(object_name varchar2(200));
insert into test_tabs (select ...)

3. export stats: select 'exec dbms_stats.export_table_stats(ownname=>''ORIG_USER'',tabname=>'''|| object_name ||''',stattab=>''STATS_FOR_TEST'');' cmd from (select distinct object_name from test_tabs) t;
4. delete stats on target schema: select 'exec dbms_stats.delete_table_stats(ownname=>''MYUSER'',tabname=>'''|| object_name ||''',force=>true);' cmd from (select distinct object_name from test_tabs) t;
5. copy table with stats: create table myuser.STATS_FOR_TEST as (select * from orig_user.STATS_FOR_TEST);
6. modify the MYUSER.STATS_FOR_TEST: update stats_for_test set c5='MYUSER';
update stats_for_test set ch1 = ora_hash(ch1) where ch1 is not null; <== to mask histogram values

7. import stats: select 'exec dbms_stats.import_table_stats(ownname=>''MYUSER'',tabname=>'''|| object_name ||''',stattab=>''STATS_FOR_TEST'',force=>true);' cmd from (select distinct object_name from test_tabs) t;

Thursday, March 10, 2011

STATS Locked after import

Metalink: ORA-38029 "Object Statistics Are Locked" - Possible Causes [ID 433240.1]

To prevent import (imp) from locking the table's statistics when importing a table without the rows (rows=n), use statistics=none. To prevent data pump import (impdp) from locking the table's statistics when importing a table without the rows (content=metadata_only), use exclude=(table_statistics,index_statistics).