Query:
select to_char(date, 'HH24:MI') as Timestamp,
count(case when type = 5 then 1 end) as Counts1,
count(case when type = 6 then 1 end) as Counts2
from data
where date >= to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI')
and date <= to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI')
and type IN (5,6)
group by to_char(date, 'HH24:MI')
order by to_char(date, 'HH24:MI')
----------- ----------- ----------
| Timestamp | Counts1 | Counts2 |
----------- ----------- ----------
| 01:00 | 200 | 12 |
| 01:01 | 250 | 35 |
| 01:02 | 300 | 47 |
| 01:03 | 150 | 78 |
| 01:04 | 100 | 125 |
| 01:05 | 125 | 5 |
| 01:06 | 130 | 10 |
| 01:07 | 140 | 12 |
| 01:08 | 150 | 35 |
| 01:09 | 160 | 47 |
| 01:10 | 170 | 78 |
| 01:11 | 180 | 125 |
| 01:12 | 190 | 5 |
| 01:13 | 210 | 10 |
| 01:14 | 220 | 12 |
| 01:15 | 230 | 35 |
| 01:16 | 240 | 47 |
| 01:17 | 260 | 78 |
| 01:18 | 270 | 125 |
| 01:19 | 280 | 5 |
| 01:20 | 290 | 10 |
----------- ----------- ----------
The above query returns counts for every minute; what we need is the sum over every 5-minute interval starting from the given timestamp.
Expected result:
----------- ----------- ----------
| Timestamp | Counts1 | Counts2 |
----------- ----------- ----------
| 01:05 | 1125 | 302 |
| 01:10 | 750 | 182 |
| 01:15 | 1030 | 187 |
| 01:20 | 1340 | 265 |
----------- ----------- ----------
Can someone help with this?
We tried the following:
select to_char(date + interval '5' minute, 'HH24:MI') as Timestamp,
count(case when type = 5 then 1 end) as Counts1,
count(case when type = 6 then 1 end) as Counts2
from data
where date >= to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI')
and date <= to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI')
and type IN (5,6)
group by to_char(date + interval '5' minute, 'HH24:MI')
order by to_char(date + interval '5' minute, 'HH24:MI')
Below is the result we got:
----------- ----------- ----------
| Timestamp | Counts1 | Counts2 |
----------- ----------- ----------
| 01:05 | 125 | 5 |
| 01:06 | 130 | 10 |
| 01:07 | 140 | 12 |
| 01:08 | 150 | 35 |
| 01:09 | 160 | 47 |
| 01:10 | 170 | 78 |
| 01:11 | 180 | 125 |
| 01:12 | 190 | 5 |
| 01:13 | 210 | 10 |
| 01:14 | 220 | 12 |
| 01:15 | 230 | 35 |
| 01:16 | 240 | 47 |
| 01:17 | 260 | 78 |
| 01:18 | 270 | 125 |
| 01:19 | 280 | 5 |
| 01:20 | 290 | 10 |
----------- ----------- ----------
We are looking for the sum over every 5-minute interval; the expected result is below:
----------- ----------- ----------
| Timestamp | Counts1 | Counts2 |
----------- ----------- ----------
| 01:05 | 1125 | 302 |
| 01:10 | 750 | 182 |
| 01:15 | 1030 | 187 |
| 01:20 | 1340 | 265 |
----------- ----------- ----------
CodePudding user response:
You could use CONNECT BY to generate the periods:
WITH dat AS
(
SELECT TO_DATE('01:00','HH24:MI') AS TIMESTAMP, 200 AS counts1, 12 AS counts2 FROM dual UNION ALL
SELECT TO_DATE('01:01','HH24:MI'), 250, 35 FROM dual UNION ALL
SELECT TO_DATE('01:02','HH24:MI'), 300, 47 FROM dual UNION ALL
SELECT TO_DATE('01:03','HH24:MI'), 150, 78 FROM dual UNION ALL
SELECT TO_DATE('01:04','HH24:MI'), 100, 125 FROM dual UNION ALL
SELECT TO_DATE('01:05','HH24:MI'), 125, 5 FROM dual UNION ALL
SELECT TO_DATE('01:06','HH24:MI'), 130, 10 FROM dual UNION ALL
SELECT TO_DATE('01:07','HH24:MI'), 140, 12 FROM dual UNION ALL
SELECT TO_DATE('01:08','HH24:MI'), 150, 35 FROM dual UNION ALL
SELECT TO_DATE('01:09','HH24:MI'), 160, 47 FROM dual UNION ALL
SELECT TO_DATE('01:10','HH24:MI'), 170, 78 FROM dual UNION ALL
SELECT TO_DATE('01:11','HH24:MI'), 180, 125 FROM dual UNION ALL
SELECT TO_DATE('01:12','HH24:MI'), 190, 5 FROM dual UNION ALL
SELECT TO_DATE('01:13','HH24:MI'), 210, 10 FROM dual UNION ALL
SELECT TO_DATE('01:14','HH24:MI'), 220, 12 FROM dual UNION ALL
SELECT TO_DATE('01:15','HH24:MI'), 230, 35 FROM dual UNION ALL
SELECT TO_DATE('01:16','HH24:MI'), 240, 47 FROM dual UNION ALL
SELECT TO_DATE('01:17','HH24:MI'), 260, 78 FROM dual UNION ALL
SELECT TO_DATE('01:18','HH24:MI'), 270, 125 FROM dual UNION ALL
SELECT TO_DATE('01:19','HH24:MI'), 280, 5 FROM dual UNION ALL
SELECT TO_DATE('01:20','HH24:MI'), 290, 10 FROM dual
)
SELECT periods.p_end, SUM(counts1), SUM(counts2)
FROM dat times
, (SELECT TO_DATE('01:00','HH24:MI') + (DECODE(LEVEL,1,1,2)-1)/(24*60) + 5*(LEVEL-1)/(24*60) AS p_start --start time offset
, TO_DATE('01:00','HH24:MI') + 5*(LEVEL)/(24*60) AS p_end
FROM dual
CONNECT BY LEVEL <= 100 --adjust according to your data
) periods
WHERE times.TIMESTAMP >= periods.p_start
AND times.TIMESTAMP <= periods.p_end
GROUP BY periods.p_end;
Probably not the optimal solution, but it gets the results you want.
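For reference, here is a rough sketch (my addition, not part of the original answer) of the same period join applied directly to the data table from the question, assuming it has the same date and type columns; the period_end alias is just an illustrative name:
SELECT TO_CHAR(periods.p_end, 'HH24:MI') AS period_end
     , COUNT(CASE WHEN d.type = 5 THEN 1 END) AS counts1
     , COUNT(CASE WHEN d.type = 6 THEN 1 END) AS counts2
FROM data d
   , (SELECT TO_DATE('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') + (DECODE(LEVEL,1,1,2)-1)/(24*60) + 5*(LEVEL-1)/(24*60) AS p_start
           , TO_DATE('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') + 5*LEVEL/(24*60) AS p_end
      FROM dual
      CONNECT BY LEVEL <= 4 --four 5-minute periods cover 01:00 to 01:20
     ) periods
WHERE d.date >= periods.p_start
AND d.date <= periods.p_end
AND d.type IN (5, 6)
GROUP BY periods.p_end
ORDER BY periods.p_end;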
CodePudding user response:
With your sample data:
WITH
tbl AS
(
Select '01:00' "TIMESTAMP", 200 "COUNTS1", 12 "COUNTS2" From Dual Union All
Select '01:01', 250, 35 From Dual Union All
Select '01:02', 300, 47 From Dual Union All
Select '01:03', 150, 78 From Dual Union All
Select '01:04', 100, 125 From Dual Union All
Select '01:05', 125, 5 From Dual Union All
Select '01:06', 130, 10 From Dual Union All
Select '01:07', 140, 12 From Dual Union All
Select '01:08', 150, 35 From Dual Union All
Select '01:09', 160, 47 From Dual Union All
Select '01:10', 170, 78 From Dual Union All
Select '01:11', 180, 125 From Dual Union All
Select '01:12', 190, 5 From Dual Union All
Select '01:13', 210, 10 From Dual Union All
Select '01:14', 220, 12 From Dual Union All
Select '01:15', 230, 35 From Dual Union All
Select '01:16', 240, 47 From Dual Union All
Select '01:17', 260, 78 From Dual Union All
Select '01:18', 270, 125 From Dual Union All
Select '01:19', 280, 5 From Dual Union All
Select '01:20', 290, 10 From Dual
),
... you could create a CTE with steps of 5 minutes:
grid AS
(
Select
t.TIMESTAMP "TIMESTAMP",
Min(s.STEP) "STEP"
From
tbl t
Inner Join
( Select LEVEL * 5 "STEP" From Dual d Connect By LEVEL <= 12 ) s ON(1 = 1)
WHERE
To_Number(SubStr(t.TIMESTAMP, 4, 2)) <= s.STEP
GROUP BY
t.TIMESTAMP
Order By
t.TIMESTAMP
)
... and join your data with the CTE, group it by STEP, and do the sums:
Select
SubStr(t.TIMESTAMP, 1, 3) || LPAD(STEP, 2, '0') "TIMESTAMP",
Sum(t.COUNTS1) "SUM_1",
Sum(t.COUNTS2) "SUM_2"
From
tbl t
INNER JOIN
grid g ON(g.TIMESTAMP = t.TIMESTAMP)
GROUP BY
SubStr(t.TIMESTAMP, 1, 3) || LPAD(STEP, 2, '0')
ORDER BY
SubStr(t.TIMESTAMP, 1, 3) || LPAD(STEP, 2, '0')
The result should be:
TIMESTAMP | SUM_1 | SUM_2 |
---|---|---|
01:05 | 1125 | 302 |
01:10 | 750 | 182 |
01:15 | 1030 | 187 |
01:20 | 1340 | 265 |
And if you want to do it every 10 minutes, then just change the Inner Join part of the CTE to look like this:
...
( Select LEVEL * 10 "STEP" From Dual d Connect By LEVEL <= 6 )
...
... in that case the result would be:
TIMESTAMP | SUM_1 | SUM_2 |
---|---|---|
01:10 | 1875 | 484 |
01:20 | 2370 | 452 |
Regards...
CodePudding user response:
Use the WIDTH_BUCKET function to break the data into 5-minute intervals, then add the bucket number multiplied by the bucket size to the interval start. One must be subtracted from the bucket number because the first bucket is bucket number 1.
select to_char(to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') -- interval start
+ (width_bucket(date
,to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') -- interval start
,to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI') -- interval end
,4 -- number of buckets: 20 minutes / 5 = 4
)
-1 -- for zero offset on first interval
)
* 300/86400 -- to add 5 minute intervals to date
,'HH24:MI') timestamp,
count(case when type = 5 then 1 end) as Counts1,
count(case when type = 6 then 1 end) as Counts2
from data
where date >= to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI')
and date <= to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI')
and type IN (5,6)
group by to_char(to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') -- interval start
+ (width_bucket(date
,to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') -- interval start
,to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI') -- interval end
,4 -- number of buckets: 20 minutes / 5 = 4
)
-1 -- for zero offset on first interval
)
* 300/86400 -- to add 5 minute intervals to date
,'HH24:MI')
order by 1
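To illustrate how the bucket numbers fall out (a standalone sketch against DUAL, not taken from the question's data): WIDTH_BUCKET with these arguments assigns bucket 1 to the minutes 01:00 through 01:04, bucket 2 to 01:05 through 01:09, and so on, while a value equal to the upper bound (01:20) lands in the overflow bucket, number 5.
select to_char(d, 'HH24:MI') as minute_value,
       width_bucket(d
                    ,to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') -- interval start
                    ,to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI') -- interval end
                    ,4) as bucket_no
from (select to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') + (level - 1)/(24*60) as d -- one row per minute
      from dual
      connect by level <= 21)
order by d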
You can also achieve the same result without the function; there is no need to calculate the number of buckets with this approach.
select to_char(to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') -- interval start
+ floor(
(date - to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI')) -- date minus interval start
/(300/86400) -- divide into 5 minute intervals
) -- gives interval number or bucket number
* 300/86400 -- multiply bucket number by bucket width
,'HH24:MI') timestamp,
count(case when type = 5 then 1 end) as Counts1,
count(case when type = 6 then 1 end) as Counts2
from data
where date >= to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI')
and date <= to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI')
and type IN (5,6)
group by to_char(to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI') -- interval start
+ floor(
(date - to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI')) -- date minus interval start
/(300/86400) -- divide into 5 minute intervals
) -- gives interval number or bucket number
* 300/86400 -- multiply bucket number by bucket width
,'HH24:MI')
order by 1
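As a worked example of the arithmetic (using the sample minutes from the question): a row stamped 01:07 lies 7 minutes, i.e. 7/1440 of a day, after the interval start; dividing by 300/86400 (5 minutes expressed in days) gives 1.4, FLOOR truncates that to bucket 1, and adding 1 * 5 minutes to 01:00 labels the row 01:05.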
CodePudding user response:
Perhaps this may help.
ALTER SESSION SET NLS_DATE_FORMAT = 'DD-MON-YYYY HH24:MI:SS';
CREATE TABLE time_data (
seq_num NUMBER GENERATED BY DEFAULT AS IDENTITY (START WITH 1) NOT NULL,
dt DATE
);
Generate a row for each minute in a day. This is my test data. I suspect you have plenty of data.
INSERT INTO time_data(dt)
select timestamp '2022-11-01 00:00:00' + numtodsinterval((rownum-1)*1,'MINUTE')
from dual
CONNECT BY LEVEL <= (24*60);
Group into 5 minute intervals. Your data should show different counts.
select trunc(dt,'hh24') + numtodsinterval(floor(to_char(dt,'mi') / 5) * 5,'minute') dt_5_min,
count(*) cnt
from time_data
group by trunc(dt,'hh24') + numtodsinterval(floor(to_char(dt,'mi') / 5) * 5,'minute')
order by trunc(dt,'hh24') + numtodsinterval(floor(to_char(dt,'mi') / 5) * 5,'minute')
/
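Applied to the question's data table with its conditional counts (a sketch that assumes the same date and type columns as in the question), the same bucketing expression could be used like this:
select to_char(trunc(date,'hh24') + numtodsinterval(floor(to_char(date,'mi') / 5) * 5,'minute'),'HH24:MI') dt_5_min, -- 5-minute bucket label
       count(case when type = 5 then 1 end) as Counts1,
       count(case when type = 6 then 1 end) as Counts2
from data
where date >= to_date('2022-10-27 01:00', 'YYYY-MM-DD HH24:MI')
and date <= to_date('2022-10-27 01:20', 'YYYY-MM-DD HH24:MI')
and type IN (5,6)
group by to_char(trunc(date,'hh24') + numtodsinterval(floor(to_char(date,'mi') / 5) * 5,'minute'),'HH24:MI')
order by 1
/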