主题:SQL / PostgreSQL — 用 LAG() 与 AVG() 计算每小时均值时,临时表“优化”反而拖慢查询(标签:sql、postgresql、lag、average、temporary)。完整讨论见下文。
我想你最大的问题是试图用临时表来“优化”本应是一个简单聚合查询的东西。你的做法在某些场景下确实有效(有时甚至是必要的),但你目前这个版本毫无帮助——价格表上有一个自联接,这可能是最大的性能杀手。这里的日期范围是多少?你知道吗,如果数据跨度超过一个月,就可能出现重复的(天, 小时)元组。问题的一部分可能正是显式临时表造成的——RDBMS 通常很擅长只构建查询所需的内容,因此把它合并成一个查询可能会有帮助。不过 40GB 似乎还是太大了。我想你建了合适的索引吧?另外,如果在窗口函数中没有指定 ORDER BY,我担心 LAG() 可能给出“未定义”的结果(取决于优化器的心血来潮)。你的表中没有任何主键或外键约束。另外,timestamp 列的默认值看起来也是错的。

(提问者)我接受了这个答案,因为它提供了我所需要的建议。时段(time-of-day)表非常有用。我自己从没用过,现在才知道在这类特定场景下这其实是常见做法,我自己从来没想到过。我的主要问题也解决了,在我的 VPS 上执行查询不再有问题。你的方案还让我看到了如何更好地组织代码。总之,这个答案优化了代码、简化了代码,甚至让它真正跑了起来。非常感谢。
-- Table: "Prices"
-- Raw market observations: one row per scrape of an item's buy/sell data.
-- DROP TABLE "Prices";
CREATE TABLE "Prices"
(
data_id integer,                 -- external item identifier (repeats across rows)
level smallint,
sell_price integer,
buy_price integer,
sell_count integer,
buy_count integer, --
name character varying(100),
-- Was: DEFAULT ('now'::text)::timestamp(1) without time zone — an obscure
-- pgAdmin-style expression that also truncates to 0.1s precision.
-- now() is the idiomatic insert-time default and keeps full precision.
date timestamp without time zone DEFAULT now(),
"ID" serial NOT NULL,
CONSTRAINT "Prices_pkey" PRIMARY KEY ("ID")
)
WITH (
OIDS=FALSE
);
-- Index: "Prices_ID_idx"
-- REMOVED: "ID" is the PRIMARY KEY, and PostgreSQL automatically creates a
-- unique btree index to enforce that constraint, so a second index on "ID"
-- duplicates it — it costs every write and helps no query.
-- CREATE INDEX "Prices_ID_idx" ON "Prices" USING btree ("ID");
-- Index: "Prices_data_id_idx"
-- Supports per-item lookups and aggregation filtered by data_id.
CREATE INDEX "Prices_data_id_idx" ON "Prices" USING btree (data_id);
-- Table: "HourlyData"
-- One row per (item, hour): averaged prices/counts plus the previous hour's
-- values and the change columns derived from them.
CREATE TABLE "HourlyData"
(
data_id bigint,
name character varying(100),
date_time timestamp without time zone,
hour integer,
day integer,
buy numeric(20,4),
sell numeric(20,4),
prev_buy numeric(20,4),
prev_sell numeric(20,4),
buy_count integer,
sell_count integer,
prev_buy_count integer,
prev_sell_count integer,
ab_change numeric(10,2),   -- buy price vs previous hour
as_change numeric(10,2),   -- sell price vs previous hour
abc_change numeric(10,2),  -- buy count vs previous hour
asc_change numeric(10,2),  -- sell count vs previous hour
-- Was: integer NOT NULL DEFAULT nextval('"DailyData_ID_seq"') with
-- CONSTRAINT "DailyData_pkey" — leftovers from an old "DailyData" table.
-- This CREATE fails outright if that foreign sequence does not exist, so
-- give the table its own serial sequence and a matching constraint name.
"ID" serial NOT NULL,
CONSTRAINT "HourlyData_pkey" PRIMARY KEY ("ID")
)
WITH (
OIDS=FALSE
);
-- Index: "HourlyData_data_id_idx"
-- Was named "DailyData_data_id_idx" — a leftover from the table's old name;
-- renamed to match the table it actually indexes.
CREATE INDEX "HourlyData_data_id_idx"
ON "HourlyData"
USING btree
(data_id);
-- Sample data.
-- Was: INSERT INTO Prices — the unquoted name folds to lowercase "prices",
-- which does not match the quoted table "Prices", so the statement fails in
-- PostgreSQL.  The table name must be quoted exactly as it was created.
INSERT INTO "Prices"
("data_id", "level", "sell_price", "buy_price", "sell_count", "buy_count", "name", "date")
VALUES
(28262, 80, 18899, 15000, 53, 66, 'random_item', '2013-12-16 01:38:07'),
(28262, 80, 18899, 15000, 53, 66, 'random_item', '2013-12-16 01:44:31'),
(28262, 80, 18987, 15000, 46, 65, 'random_item', '2013-12-16 01:30:22'),
(28262, 80, 18987, 16000, 49, 65, 'random_item', '2013-12-16 01:00:19'),
(28265, 80, 18987, 16000, 48, 64, 'random_itema', '2013-12-16 01:30:20'),
(28265, 80, 18987, 16000, 48, 64, 'random_itema', '2013-12-16 01:00:21'),
(28265, 80, 17087, 16000, 49, 63, 'random_itema', '2013-12-16 01:30:22'),
(28262, 80, 18980, 5028, 48, 62, 'random_item', '2013-12-16 10:00:28'),
(28262, 80, 18975, 5528, 50, 60, 'random_item', '2013-12-16 10:30:30'),
(28262, 80, 18975, 5228, 51, 59, 'random_item', '2013-12-16 10:00:27'),
(28262, 80, 18975, 5500, 52, 59, 'random_item', '2013-12-16 10:30:21'),
(28262, 80, 18975, 5600, 53, 59, 'random_item', '2013-12-16 10:00:23'),
(28262, 80, 18979, 5700, 50, 58, 'random_item', '2013-12-16 10:30:28'),
(28262, 80, 18977, 5028, 51, 56, 'random_item', '2013-12-16 10:00:23'),
(28264, 80, 18978, 5028, 51, 54, 'random_itemaw', '2013-12-16 10:30:25'),
(28264, 80, 18979, 5628, 50, 54, 'random_itemaw', '2013-12-16 10:00:28'),
(28264, 80, 18979, 5028, 52, 64, 'random_itemaw', '2013-12-16 10:30:26'),
(28264, 80, 18979, 15028, 52, 64, 'random_item', '2013-12-16 11:00:25'),
(28264, 80, 17977, 15028, 56, 63, 'random_item', '2013-12-16 11:30:24'),
(28264, 80, 17977, 15029, 58, 62, 'random_item', '2013-12-16 11:00:30'),
(28262, 80, 17977, 15027, 58, 62, 'random_item', '2013-12-16 11:30:22'),
(28262, 80, 16000, 15022, 59, 49, 'random_item', '2013-12-16 11:00:26'),
(28262, 80, 17979, 15021, 56, 49, 'random_item', '2013-12-16 11:30:26'),
(28262, 80, 17969, 15023, 58, 44, 'random_item', '2013-12-16 11:00:31'),
(28262, 80, 18987, 15027, 48, 44, 'random_item', '2013-12-16 12:30:33'),
(28262, 80, 20819, 15027, 40, 43, 'random_item', '2013-12-16 12:00:32'),
(28262, 80, 21810, 15034, 37, 48, 'random_item', '2013-12-16 12:30:24'),
(28262, 80, 21810, 15037, 39, 49, 'random_item', '2013-12-16 22:00:18'),
(28262, 80, 21810, 15038, 39, 49, 'random_item', '2013-12-16 22:30:25'),
(28262, 80, 21810, 15038, 39, 49, 'random_item', '2013-12-16 22:00:25'),
(28262, 80, 21710, 15039, 40, 49, 'random_item', '2013-12-16 22:30:24'),
(28262, 80, 21709, 15040, 41, 49, 'random_item', '2013-12-16 22:00:24'),
(28262, 80, 21709, 15040, 41, 49, 'random_item', '2013-12-16 22:30:22'),
(28262, 80, 21709, 15040, 41, 49, 'random_item', '2013-12-16 23:00:24'),
(28262, 80, 21709, 15041, 41, 49, 'random_item', '2013-12-16 23:30:27'),
(28266, 80, 21708, 15042, 42, 50, 'random_item1', '2013-12-17 05:00:26'),
(28266, 80, 20000, 15041, 43, 49, 'random_item1', '2013-12-17 05:30:21'),
(28266, 80, 20000, 15097, 43, 52, 'random_item1', '2013-12-17 05:00:28'),
(28262, 80, 20000, 15097, 43, 52, 'random_item', '2013-12-17 05:30:28'),
(28262, 80, 20000, 15097, 43, 52, 'random_item', '2013-12-17 05:00:31'),
(28262, 80, 20000, 15097, 44, 51, 'random_item', '2013-12-17 05:30:34'),
(28262, 80, 19997, 15097, 44, 47, 'random_item', '2013-12-17 05:00:20'),
(28262, 80, 19997, 15098, 44, 50, 'random_item', '2013-12-17 05:30:26'),
(28262, 80, 19997, 15098, 44, 50, 'random_item', '2013-12-17 05:00:24'),
(28262, 80, 19997, 15098, 44, 49, 'random_item', '2013-12-17 05:35:44'),
(28262, 80, 19996, 15098, 45, 48, 'random_item', '2013-12-17 05:00:22'),
(28262, 80, 19996, 15097, 46, 47, 'random_item', '2013-12-17 05:30:24'),
(28262, 80, 19996, 15097, 46, 47, 'random_item', '2013-12-17 05:00:29'),
(28262, 80, 19996, 15097, 46, 47, 'random_item', '2013-12-17 05:30:24'),
(28262, 80, 19996, 15041, 47, 46, 'random_item', '2013-12-17 05:00:25')
;
-- Function: percentageincrease(numeric, numeric)
-- Percentage change from nimetaja (denominator / old value) to lugeja
-- (numerator / new value), rounded to 2 decimal places.
-- Returns 0 when the denominator is NULL or 0 (guards the division);
-- returns NULL when lugeja is NULL and the denominator is usable —
-- identical semantics to the original IF/ELSE version.
CREATE OR REPLACE FUNCTION percentageincrease(lugeja numeric, nimetaja numeric)
RETURNS numeric AS
$BODY$
BEGIN
IF nimetaja IS NULL OR nimetaja = 0
THEN RETURN 0;
ELSE
RETURN ROUND((lugeja - nimetaja) / nimetaja * 100, 2);
END IF;
END;
$BODY$
LANGUAGE plpgsql
-- Pure computation on its arguments: declaring it IMMUTABLE (the default is
-- VOLATILE) lets the planner cache and pre-evaluate calls, which matters
-- when it is applied per-row in large aggregations.
IMMUTABLE;
-- Function: process_hourly_data()
-- Builds hourly per-item averages from "Prices" into "HourlyData" via two
-- temp tables.  NOTE(review): this is the problematic original that the
-- single INSERT..SELECT at the end of the file replaces; issues are flagged
-- inline below.
CREATE OR REPLACE FUNCTION process_hourly_data()
RETURNS void AS
$BODY$
-- Step 1: full copy of "Prices" plus derived hour/day parts.
-- NOTE(review): this duplicates the whole source table; the derived columns
-- could be computed directly inside the aggregate query instead.
CREATE TEMP TABLE "TEMP_summarize1" AS
SELECT
prices.data_id AS data_id,
prices.name AS name,
date_part('hour', prices.date) AS hour,
date_part('day', prices.date) AS day,
prices.date AS date_var,
prices.buy_price,
prices.sell_price,
prices.sell_count,
prices.buy_count
FROM "Prices" as prices;
-- Step 2 target table.
-- NOTE(review): the avg* columns are smallint, so the numeric AVG() results
-- are rounded on insert and would overflow for averages beyond 32767.
CREATE TEMP TABLE "TEMP_summarize2"
(
item_name character varying(100),
data_id bigint,
hour integer,
day integer,
date_var timestamp without time zone,
avgbuy smallint,
avgsell smallint,
avgsellCount smallint,
avgbuyCount smallint
);
-- NOTE(review): "whatever" re-reads TEMP_summarize1 and is joined back to it
-- on (data_id, name) — a self-join that multiplies every row by the number
-- of matching rows without adding information (the likely 40GB blow-up).
-- NOTE(review): grouping by (hour, data_id) while taking only MAX(day)
-- merges rows from different days that share the same clock hour — the
-- duplicate day/hour problem appears as soon as data spans more than a day.
PREPARE TEMP2 AS
INSERT INTO "TEMP_summarize2"
SELECT
MAX(whatever.name) as item_name,
whatever.data_id,
prices.hour,
MAX(prices.day) as day,
MAX(prices.date_var) as date_var,
AVG(prices.buy_price) AS avgbuy,
AVG(prices.sell_price) AS avgsell,
AVG(prices.sell_count) AS avgsellCount,
AVG(prices.buy_count) AS avgbuyCount
FROM "TEMP_summarize1" AS prices, (SELECT data_id, name FROM "TEMP_summarize1" as whatever) AS whatever
WHERE whatever.data_id = prices.data_id AND whatever.name = prices.name
GROUP BY hour, whatever.data_id;
-- NOTE(review): every LAG(...) below has PARTITION BY but no ORDER BY, so
-- "previous row" is whatever order the executor happens to produce — the
-- results are nondeterministic.  The trailing ORDER BY of the SELECT does
-- not define the window order.
PREPARE TEMP3 AS
INSERT INTO "HourlyData"
SELECT
data_id,
item_name,
date_var,
hour,
day,
avgbuy,
avgsell,
LAG(avgbuy, 1, NULL) OVER(PARTITION BY data_id) AS last_avgbuy,
LAG(avgsell, 1, NULL) OVER(PARTITION BY data_id) AS last_avgsell,
avgsellCount,
avgbuyCount,
LAG(avgsellCount, 1, NULL) OVER (PARTITION BY data_id) AS last_avgsellCount,
LAG(avgbuyCount, 1, NULL) OVER (PARTITION BY data_id) AS last_avgbuyCount,
percentageincrease(LAG(avgbuy, 1, NULL) OVER(PARTITION BY data_id), avgbuy),
percentageincrease(LAG(avgsell, 1, NULL) OVER(PARTITION BY data_id), avgsell),
percentageincrease(LAG(avgsellCount, 1, NULL) OVER (PARTITION BY data_id), avgsellCount),
percentageincrease(LAG(avgbuyCount, 1, NULL) OVER (PARTITION BY data_id), avgbuyCount)
FROM "TEMP_summarize2"
ORDER BY data_id, hour;
-- NOTE(review): PREPARE/EXECUTE inside a LANGUAGE sql function is unusual;
-- prepared statements are session-scoped, so a second call in the same
-- session would presumably fail on the duplicate names — verify.
EXECUTE TEMP2;
EXECUTE TEMP3;
$BODY$
LANGUAGE sql;
SET work_mem = '1MB';  -- was missing the terminating semicolon before DO
-- Sliding-window scan: walks the rows in order and keeps the three most
-- recent ones in t_1 (newest), t_2, t_3 (oldest).
-- NOTE(review): "foo" is a placeholder row type / table name — substitute
-- the real composite type and source table before running.
DO $$
DECLARE
    t_1 foo;
    t_2 foo;
    t_3 foo;
    r foo;
BEGIN  -- the original omitted BEGIN/END around the loop (syntax error)
    FOR r IN SELECT * FROM foo ORDER BY some_col
    LOOP
        /* move history records */
        t_3 := t_2;
        t_2 := t_1;
        t_1 := r;
        -- now in t_1, t_2, and t_3 are presorted records
        -- do something there
    END LOOP;
END
$$;
-- Build one half-open [rangeStart, rangeEnd) bucket per calendar hour by
-- crossing a calendar (dates) table with a time-of-day (hours) table.
-- NOTE(review): Calendar and TimeOfDay are helper tables defined elsewhere.
WITH Date_Range as (SELECT calendarDate, timeOfDay,
                           calendarDate + timeOfDay as rangeStart,
                           (calendarDate + timeOfDay) + INTERVAL '1 hour' as rangeEnd
                    FROM Calendar
                    CROSS JOIN TimeOfDay
                    WHERE calendarDate >= CAST('2013-12-16' as DATE)
                      AND calendarDate < CAST('2013-12-18' as DATE))
-- Was: INSERT INTO HourlyData — unquoted, which folds to "hourlydata" and
-- misses the quoted table "HourlyData".  An explicit column list also keeps
-- the statement valid when columns (like the serial "ID") are reordered.
INSERT INTO "HourlyData"
    (data_id, name, date_time, hour, day,
     buy, sell, prev_buy, prev_sell,
     buy_count, sell_count, prev_buy_count, prev_sell_count,
     ab_change, as_change, abc_change, asc_change)
SELECT data_id,
       randomName,
       latestPriceChangeAt,
       EXTRACT(HOUR FROM timeOfDay) as hourOfDay,
       EXTRACT(DAY FROM calendarDate) as dayOfMonth,
       averageBuyPrice,
       averageSellPrice,
       previousAverageBuyPrice,
       previousAverageSellPrice,
       averageBuyCount,
       averageSellCount,
       previousAverageBuyCount,
       previousAverageSellCount,
       -- put the calls to your function here instead of these operations
       averageBuyPrice - previousAverageBuyPrice,
       averageSellPrice - previousAverageSellPrice,
       averageBuyCount - previousAverageBuyCount,
       averageSellCount - previousAverageSellCount
-- Per (item, hour-bucket) averages; LAG is ordered by the bucket so each
-- row deterministically sees the item's previous hour.
FROM (SELECT data_id, calendarDate, timeOfDay,
             MAX(date) as latestPriceChangeAt,
             MAX(name) as randomName,
             AVG(sell_price) as averageSellPrice,
             AVG(sell_count) as averageSellCount,
             AVG(buy_price) as averageBuyPrice,
             AVG(buy_count) as averageBuyCount,
             LAG(AVG(buy_price)) OVER(PARTITION BY data_id ORDER BY calendarDate, timeOfDay) as previousAverageBuyPrice,
             LAG(AVG(sell_price)) OVER(PARTITION BY data_id ORDER BY calendarDate, timeOfDay) as previousAverageSellPrice,
             LAG(AVG(buy_count)) OVER(PARTITION BY data_id ORDER BY calendarDate, timeOfDay) as previousAverageBuyCount,
             LAG(AVG(sell_count)) OVER(PARTITION BY data_id ORDER BY calendarDate, timeOfDay) as previousAverageSellCount
      -- Was: JOIN Prices — unquoted, folds to "prices" and misses "Prices".
      FROM Date_Range
      JOIN "Prices"
        ON "Prices".date >= Date_Range.rangeStart
       AND "Prices".date < Date_Range.rangeEnd
      GROUP BY data_id, calendarDate, timeOfDay) data;