Maintaining Trail | Multiple Databases | Audit Trail Warehouse | Oracle Audit Vault Alternative | Aud$ LOGS
-- This script exports a copy of the AUD$ table with Data Pump and uploads the generated dump file to the destination host through SQL*Net.
-- 25.09.2016
-- Kamil Piórek
-- kamil.piorekk@gmail.com
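--
-- Prerequisites (a minimal sketch, not part of the original script): the objects below are
-- assumed to exist before the first run. Apart from AUD$ and DATA_PUMP_DIR, every name
-- (P4ADM, AUD_EXPORT_HISTORY, TBS_P4ADM, AWRDB) is taken from the script itself; the
-- column list, password placeholder and TNS alias are illustrative only.
/*
-- history table consulted below to decide whether yesterday's load succeeded
CREATE TABLE P4ADM.AUD_EXPORT_HISTORY ("DATE" DATE, ISSENDED NUMBER) TABLESPACE TBS_P4ADM;

-- database link used by DBMS_FILE_TRANSFER.PUT_FILE at the end of the script
CREATE DATABASE LINK AWRDB CONNECT TO p4adm IDENTIFIED BY "your_password" USING 'AWRDB_TNS';
*/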
DECLARE
h1 NUMBER;
fname varchar2(60);
fname_log varchar2(30);
last_load number ;
vst varchar2(300) ;
BEGIN
-- IF THE LAST LOAD WAS NOT SUCCESSFUL (no row in AUD_EXPORT_HISTORY within the last 27 hours), APPEND TO TABLE P4ADM.AUD AGAIN; IF IT WAS SUCCESSFUL, DROP AND RECREATE THE TABLE WITH NEW DATA FROM AUD$
-- SELECT ISSENDED into last_load FROM P4ADM.AUD_EXPORT_HISTORY ;
SELECT count(*) INTO last_load
FROM P4ADM.AUD_EXPORT_HISTORY
WHERE "DATE" > sysdate - interval '27' hour ;
IF last_load = 0 THEN
vst:='INSERT INTO P4ADM.AUD SELECT * FROM AUD$ WHERE NTIMESTAMP# < TRUNC(SYSDATE)';
execute immediate vst;
ELSE
vst:='DROP TABLE P4ADM.AUD';
execute immediate vst;
vst:='CREATE TABLE P4ADM.AUD TABLESPACE TBS_P4ADM AS SELECT * FROM AUD$ WHERE NTIMESTAMP# < TRUNC(SYSDATE)';
execute immediate vst;
END IF ;
--- END IF
-- generate filenames
SELECT lower(name||TO_CHAR(TRUNC(SYSDATE-1),'yyyymmdd'))||'.dmp' INTO fname
FROM v$database ;
SELECT lower(name||TO_CHAR(TRUNC(SYSDATE-1),'yyyymmdd'))||'.log' INTO fname_log
FROM v$database ;
-- datapump api calls
-- open a table-mode export job named DBA_AUDIT_EXP_JOB
BEGIN
h1 :=
DBMS_DATAPUMP.open (operation => 'EXPORT',
job_mode => 'TABLE',
job_name => 'DBA_AUDIT_EXP_JOB',
version => 'COMPATIBLE');
END;
BEGIN
DBMS_DATAPUMP.set_parallel (handle => h1, degree => 1);
END;
-- attach the log file (filetype 3 = KU$_FILE_TYPE_LOG_FILE)
BEGIN
DBMS_DATAPUMP.add_file (handle => h1,
filename => fname_log,
directory => 'DATA_PUMP_DIR',
filetype => 3);
END;
-- do not retain the Data Pump master table after the job completes
BEGIN
DBMS_DATAPUMP.set_parameter (handle => h1,
name => 'KEEP_MASTER',
VALUE => 0);
END;
-- restrict the export to the P4ADM schema
BEGIN
DBMS_DATAPUMP.metadata_filter (handle => h1,
name => 'SCHEMA_EXPR',
VALUE => 'IN(''P4ADM'')');
END;
-- restrict the export to the AUD table
BEGIN
DBMS_DATAPUMP.metadata_filter (
handle => h1,
name => 'NAME_EXPR',
VALUE => 'IN(''AUD'')'
);
END;
-- attach the dump file (filetype 1 = KU$_FILE_TYPE_DUMP_FILE)
BEGIN
DBMS_DATAPUMP.add_file (handle => h1,
filename => fname,
directory => 'DATA_PUMP_DIR',
filetype => 1);
END;
-- start the export job, then detach from the handle
BEGIN
DBMS_DATAPUMP.start_job (handle => h1, skip_current => 0, abort_step => 0);
END;
BEGIN
DBMS_DATAPUMP.detach (handle => h1);
END;
-- wait a moment for the dump file to be completed on the filesystem
dbms_lock.sleep(25);
-- send the file to the remote host (it cannot be that easy: you still have to write your own import script on the destination side; see the import sketch after this block) -- good luck!
BEGIN
DBMS_FILE_TRANSFER.PUT_FILE(
source_directory_object => 'DATA_PUMP_DIR',
source_file_name => fname,
destination_directory_object => 'DATA_PUMP_DIR',
destination_file_name => fname,
destination_database => 'AWRDB');
END;
-------------
END;
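-- Import side (a minimal sketch, assuming the dump file has already landed in DATA_PUMP_DIR
-- on the warehouse host; the job name, the hard-coded file name and TABLE_EXISTS_ACTION=APPEND
-- are assumptions, adjust them to your target):
/*
DECLARE
h2 NUMBER;
job_state VARCHAR2(30);
BEGIN
h2 := DBMS_DATAPUMP.open (operation => 'IMPORT',
                          job_mode => 'TABLE',
                          job_name => 'DBA_AUDIT_IMP_JOB');
-- point the job at the transferred dump file (must match the fname generated on the source)
DBMS_DATAPUMP.add_file (handle => h2,
                        filename => 'sourcedb20160924.dmp',
                        directory => 'DATA_PUMP_DIR',
                        filetype => 1);
-- append into the existing warehouse table instead of failing with "table exists"
DBMS_DATAPUMP.set_parameter (handle => h2,
                             name => 'TABLE_EXISTS_ACTION',
                             VALUE => 'APPEND');
DBMS_DATAPUMP.start_job (handle => h2);
-- block until the import finishes
DBMS_DATAPUMP.wait_for_job (handle => h2, job_state => job_state);
END;
*/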
------------------------------
-- USEFUL STUFF
/*
SELECT o.status, o.object_id, o.object_type,
o.owner||'.'||object_name "OWNER.OBJECT",o.created,j.state
FROM dba_objects o, dba_datapump_jobs j
WHERE o.owner=j.owner_name AND o.object_name=j.job_name
and o.owner='&SCHEMA' and j.state='NOT RUNNING' ORDER BY 4,2;
select * from DBA_DATAPUMP_JOBS;
select * from dba_scheduler_jobs where job_name like 'DBA_AUDIT_EXP_JOB';
select owner_name, job_name, operation, state from DBA_DATAPUMP_JOBS where owner_name = 'SYS';
-- clean up leftover Data Pump master tables from failed or old runs
DROP TABLE SYS.AUDIT_EXPORT_NEW;
DROP TABLE SYS.DBA_AUDIT_EXP_JOB;
*/
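-- Scheduling (a sketch, assuming the export block above has been wrapped in a stored
-- procedure; the procedure name P4ADM.EXPORT_AUD, the job name and the 01:00 schedule
-- are illustrative only):
/*
BEGIN
DBMS_SCHEDULER.create_job (
  job_name => 'AUD_EXPORT_NIGHTLY',
  job_type => 'STORED_PROCEDURE',
  job_action => 'P4ADM.EXPORT_AUD',
  start_date => SYSTIMESTAMP,
  repeat_interval => 'FREQ=DAILY;BYHOUR=1;BYMINUTE=0',
  enabled => TRUE);
END;
*/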