Wednesday, February 8, 2017

Archive log generated per day or per hour



This is to find the historical archive log generation report.

Simple query to find the archive generated
===============================================================
-- Daily archive log count and approximate generated size.
-- NOTE(review): the literal 200 assumes an average archive log size of 200 MB,
-- so count*200/1024 approximates GB/day — adjust 200 to your actual log size
-- (or compute from blocks*block_size as in the gv$archived_log query below).
select trunc(completion_time),count(1),round(count(1)*200/1024) from v$archived_log group by  trunc(completion_time) order by  trunc(completion_time);

Output:-
TRUNC(COMPLETIO   COUNT(1) ROUND(COUNT(1)*200/1024)
--------------- ---------- ------------------------
24-NOV-15              142                       28
25-NOV-15                4                        1
26-NOV-15                5                        1
27-NOV-15                5                        1
28-NOV-15                3                        1
29-NOV-15               14                        3
30-NOV-15                5                        1
01-DEC-15                5                        1
02-DEC-15                5                        1

Query to give the size of archive logs generated per day in GB
============================================================

-- Approximate archive generation per day in GB:
-- (log switches per day from v$log_history) x (average online redo log size
-- from v$log), converted from bytes to GB.
SELECT daily.*,
       Round(daily.Count# * logsize.AVG# / 1024 / 1024 / 1024) Daily_Avg_GB
FROM (
    -- one row per day: number of log switches and the RECID range
    SELECT To_Char(First_Time, 'YYYY-MM-DD') DAY,
           Count(1)                          Count#,
           Min(RECID)                        Min#,
           Max(RECID)                        Max#
    FROM v$log_history
    GROUP BY To_Char(First_Time, 'YYYY-MM-DD')
    ORDER BY 1 DESC
) daily
CROSS JOIN (
    -- single row: size statistics of the online redo logs
    SELECT Avg(BYTES) AVG#,
           Count(1)   Count#,
           Max(BYTES) Max_Bytes,
           Min(BYTES) Min_Bytes
    FROM v$log
) logsize;

Output:-

Press Return to Continue

DAY            COUNT#       MIN#       MAX# DAILY_AVG_GB
---------- ---------- ---------- ---------- ------------
2017-02-09          3      31560      31562            1
2017-02-08          6      31554      31559            1
2017-02-07          7      31547      31553            1
2017-02-06          7      31540      31546            1
2017-02-05         14      31526      31539            3
2017-02-04          4      31522      31525            1
2017-02-03          6      31516      31521            1
2017-02-02         12      31504      31515            2
2017-02-01          6      31498      31503            1
2017-01-31          9      31489      31497            2
2017-01-30         10      31479      31488            2
2017-01-29         15      31464      31478            3
2017-01-28          4      31460      31463            1
2017-01-27          6      31454      31459            1

Query to give archive generation on an hourly basis
==================================================================

set pagesize 120;
set linesize 200;
col day for a8;
-- Log-switch (archive) distribution per hour: one row per day, one column
-- per hour of the day ("00".."23"), plus the daily total.
-- TO_CHAR(first_time,'HH24') already yields exactly two digits, so no
-- SUBSTR is needed around it.
select
  to_char(first_time,'YY-MM-DD') day,
  to_char(sum(decode(to_char(first_time,'HH24'),'00',1,0)),'999') "00",
  to_char(sum(decode(to_char(first_time,'HH24'),'01',1,0)),'999') "01",
  to_char(sum(decode(to_char(first_time,'HH24'),'02',1,0)),'999') "02",
  to_char(sum(decode(to_char(first_time,'HH24'),'03',1,0)),'999') "03",
  to_char(sum(decode(to_char(first_time,'HH24'),'04',1,0)),'999') "04",
  to_char(sum(decode(to_char(first_time,'HH24'),'05',1,0)),'999') "05",
  to_char(sum(decode(to_char(first_time,'HH24'),'06',1,0)),'999') "06",
  to_char(sum(decode(to_char(first_time,'HH24'),'07',1,0)),'999') "07",
  to_char(sum(decode(to_char(first_time,'HH24'),'08',1,0)),'999') "08",
  to_char(sum(decode(to_char(first_time,'HH24'),'09',1,0)),'999') "09",
  to_char(sum(decode(to_char(first_time,'HH24'),'10',1,0)),'999') "10",
  to_char(sum(decode(to_char(first_time,'HH24'),'11',1,0)),'999') "11",
  to_char(sum(decode(to_char(first_time,'HH24'),'12',1,0)),'999') "12",
  to_char(sum(decode(to_char(first_time,'HH24'),'13',1,0)),'999') "13",
  to_char(sum(decode(to_char(first_time,'HH24'),'14',1,0)),'999') "14",
  to_char(sum(decode(to_char(first_time,'HH24'),'15',1,0)),'999') "15",
  to_char(sum(decode(to_char(first_time,'HH24'),'16',1,0)),'999') "16",
  to_char(sum(decode(to_char(first_time,'HH24'),'17',1,0)),'999') "17",
  to_char(sum(decode(to_char(first_time,'HH24'),'18',1,0)),'999') "18",
  to_char(sum(decode(to_char(first_time,'HH24'),'19',1,0)),'999') "19",
  to_char(sum(decode(to_char(first_time,'HH24'),'20',1,0)),'999') "20",
  to_char(sum(decode(to_char(first_time,'HH24'),'21',1,0)),'999') "21",
  to_char(sum(decode(to_char(first_time,'HH24'),'22',1,0)),'999') "22",
  to_char(sum(decode(to_char(first_time,'HH24'),'23',1,0)),'999') "23",
  COUNT(*)
from v$log_history
group by to_char(first_time,'YY-MM-DD')
order by day ;

Output:-




For sizing the archive log destination:
1. It should be at least twice the size of the maximum archive volume generated per day.
2. A compressed backup of the archive logs comes to about 30% of the actual archive log size.



> to know archives generated in a month day wise

-- Archives generated day-wise for a given month ('DEC' here; change as needed).
-- Fixed: typographic quotes (’) replaced with standard single quotes (')
-- so the statement actually parses in SQL*Plus.
select to_char(first_time,'DD-MON'),count(*) from v$log_history
where to_char(first_time,'MON')='DEC'
group by to_char(first_time,'DD-MON');


Output:-

TO_CHAR(FIRS   COUNT(*)
------------ ----------
12-DEC              186
20-DEC               20
24-DEC               17
10-DEC               14

13-DEC              160
> to know archives generated in a day

-- Archives generated on a given day of the month (day 10 here) across months.
-- Fixed: typographic quotes (’) replaced with standard single quotes (');
-- compare the 'DD' string against '10' (not the number 10) to avoid an
-- implicit datatype conversion.
select to_char(first_time,'DD-MON'),count(*) from v$log_history
where to_char(first_time,'DD')='10'
group by to_char(first_time,'DD-MON');

(Here, 10 in the query is the day of the month.)

Output-
TO_CHAR(FIRS   COUNT(*)
------------ ----------
10-JAN               17
10-MAR               18
10-JUL               24
10-AUG               34

10-DEC               14
> to know archives generated in a day including time

-- Archive generation timestamps on a given day of the month (day 15 here).
-- Fixed: typographic quotes (’) replaced with standard single quotes (');
-- compare the 'DD' string against '15' to avoid implicit conversion.
select to_char(first_time,'DD-MON:hh24:mi:ss') from v$log_history
where to_char(first_time,'DD')='15';


OutPut-

TO_CHAR(FIRST_TIME,'D
---------------------
15-JUL:00:03:54
15-JUL:04:29:29
15-JUL:11:04:37
15-JUL:14:40:48
15-JUL:21:17:38

15-AUG:02:49:35
> to know archives generated on specific date in hourly basis
-- Hourly archive counts for one specific date ('08-02-17' = 08-Feb-2017 here).
-- Fixed: typographic quotes (’ and ′) replaced with standard single quotes (')
-- so the statement actually parses in SQL*Plus.
select to_char(first_time,'hh24'),count(*) from v$log_history
where to_char(first_time,'dd-mm-yy') in ('08-02-17')
group by to_char(first_time,'hh24');
> Display Archive Generation in MB by Day
SET PAUSE ON
SET PAUSE 'Press Return to Continue'
SET PAGESIZE 60
SET LINESIZE 300
SET VERIFY OFF
 
COL "Generation Date" FORMAT a20
 
-- Actual archive volume per day from gv$archived_log (covers every RAC
-- instance): blocks*block_size gives bytes per log; /1048576 converts
-- bytes to MB — so the figures reported are MB per day.
SELECT TRUNC(completion_time)  "Generation Date" ,
   round(SUM(blocks*block_size)/1048576,0) "Total for the Day in MB"
FROM gv$archived_log
GROUP BY TRUNC(completion_time)
ORDER BY TRUNC(completion_time)
/

OutPut:-

Generation Date      Total for the Day in MB
-------------------- -----------------------
24-NOV-15                              25857
25-NOV-15                                627
26-NOV-15                                790
27-NOV-15                                788



Find a session which are generating more archive logs

To find sessions generating lots of redo, you can use either of the following methods. Both methods examine the amount of undo generated. When a transaction generates undo, it will automatically generate redo as well.
The methods are: 1) Query V$SESS_IO. This view contains the column BLOCK_CHANGES, which indicates how many blocks have been changed by the session. A high value indicates a session generating lots of redo.
The query you can use is:
 -- Sessions ranked by cumulative BLOCK_CHANGES (a proxy for redo generation);
 -- run repeatedly and compare deltas — a static high value alone is not proof.
 SQL> SELECT s.sid, s.serial#, s.username, s.program,
  i.block_changes
  FROM v$session s, v$sess_io i
  WHERE s.sid = i.sid
  ORDER BY 5 desc, 1, 2, 3, 4;

 SID SERIAL# USERNAME      PROGRAM       BLOCK_CHANGES
---------- ---------- ------------ -----------------------
 158 6 SCOTT               sqlplus.exe       630295
 159 3 SYS                 sqlplus.exe       97
 161 1                     ORACLE.EXE (MMON) 58
 164 1 ORACLE.EXE          (SMON)            34
 148 5 ORACLE.EXE          (q001)            0
........ 
19 rows selected.

Run the query multiple times and examine the delta between each occurrence of BLOCK_CHANGES. Large deltas indicate high redo generation by the session. For example, the SCOTT session with SID 158 has the highest BLOCK_CHANGES value and is the main session responsible for generating archive logs.
2) Query V$TRANSACTION. This view contains information about the amount of undo blocks and undo records accessed by the transaction (as found in the USED_UBLK and USED_UREC columns).
The query you can use is:
 -- Active transactions ranked by undo usage (USED_UBLK = undo blocks,
 -- USED_UREC = undo records); high undo implies high redo generation.
 SQL> SELECT s.sid, s.serial#, s.username, s.program, 
  t.used_ublk, t.used_urec
  FROM v$session s, v$transaction t
  WHERE s.taddr = t.addr
  ORDER BY 5 desc, 6 desc, 1, 2, 3, 4;

 SID SERIAL# USERNAME PROGRAM        USED_UBLK USED_UREC
---------- ---------- ------------ ---------------------------
 158 6       SCOTT    sqlplus.exe    4929      157526

SQL>
Run the query multiple times and examine the delta between each occurrence of USED_UBLK and USED_UREC. Large deltas indicate high redo generation by the session.
You use the first query when you need to check for programs generating lots of redo when these programs activate more than one transaction. The latter query can be used to find out which particular transactions are generating redo.
From the above example we can see that the user SCOTT is generating the most archive logs. To find out which SQL statement the SCOTT user executed, run:
-- Current SQL text and undo usage for a given user's active transaction.
-- NOTE(review): the literal 8192 assumes an 8 KB database block size
-- (db_block_size) when converting undo blocks to KB — adjust if your
-- database uses a different block size.
SQL> select sql.sql_text sql_text, t.USED_UREC Records, t.USED_UBLK Blocks,
 (t.USED_UBLK*8192/1024) KBytes from v$transaction t,
 v$session s,
 v$sql sql
 where t.addr = s.taddr
 and s.sql_id = sql.sql_id
 and s.username ='&USERNAME';
Enter value for username: SCOTT
old 7: and s.username ='&USERNAME'
new 7: and s.username ='SCOTT'
SQL_TEXT                                 RECORDS BLOCKS KBYTES
---------- ---------- ----------
insert into emp_c select * from emp_c    157526   4929  3943
Source-oracleracdba1.wordpress.com

No comments:

Post a Comment