1-- This file and its contents are licensed under the Timescale License.
2-- Please see the included NOTICE for copyright information and
3-- LICENSE-TIMESCALE for a copy of the license.
4
-- Helper view used throughout this test: maps every chunk of a hypertable to
-- its compressed chunk (NULL columns when the chunk is not compressed) and
-- exposes the raw chunk status flags from the catalog.
CREATE OR REPLACE VIEW compressed_chunk_info_view AS
SELECT
    h.schema_name AS hypertable_schema,
    h.table_name AS hypertable_name,
    c.schema_name AS chunk_schema,
    c.table_name AS chunk_name,
    c.status AS chunk_status,
    comp.schema_name AS compressed_chunk_schema,
    comp.table_name AS compressed_chunk_name
FROM _timescaledb_catalog.hypertable h
INNER JOIN _timescaledb_catalog.chunk c ON c.hypertable_id = h.id
-- LEFT JOIN: keep uncompressed chunks, with NULL compressed_chunk_* columns
LEFT JOIN _timescaledb_catalog.chunk comp ON comp.id = c.compressed_chunk_id;
20
-- Set up a hypertable with compression enabled (segment by b, order by timec DESC).
CREATE TABLE test2 (timec timestamptz NOT NULL, i integer ,
      b bigint, t text);
SELECT table_name from create_hypertable('test2', 'timec', chunk_time_interval=> INTERVAL '7 days');

-- Seed a single 7-day chunk with 2 hours of 5-minute samples,
-- all sharing one segmentby value (b = 11).
INSERT INTO test2 SELECT q, 10, 11, 'hello' FROM generate_series( '2020-01-03 10:00:00+00', '2020-01-03 12:00:00+00' , '5 min'::interval) q;
ALTER TABLE test2 set (timescaledb.compress,
timescaledb.compress_segmentby = 'b',
timescaledb.compress_orderby = 'timec DESC');

-- Compress every chunk of test2 (only one exists at this point).
SELECT compress_chunk(c)
FROM show_chunks('test2') c;
32
--- insert into the middle of the compressed chunk's time range ---
-- New rows land uncompressed next to the compressed data; per the status
-- check below this should mark the chunk as unordered.
INSERT INTO test2 values ( '2020-01-03 10:01:00+00', 20, 11, '2row');
INSERT INTO test2 values ( '2020-01-03 11:01:00+00', 20, 11, '3row');
INSERT INTO test2 values ( '2020-01-03 12:01:00+00', 20, 11, '4row');
--- insert a row with a new segmentby value (b = 12) ---
INSERT INTO test2 values ( '2020-01-03 11:01:00+00', 20, 12, '12row');

-- Sanity check: the query must see both the compressed rows and the
-- freshly inserted uncompressed rows.
SELECT time_bucket(INTERVAL '2 hour', timec), b, count(*)
FROM test2
GROUP BY time_bucket(INTERVAL '2 hour', timec), b
ORDER BY 1, 2;
44
--check status for chunk --
SELECT chunk_status,
       chunk_name as "CHUNK_NAME"
FROM compressed_chunk_info_view
WHERE hypertable_name = 'test2' ORDER BY chunk_name;

-- Capture the chunk and compressed-chunk names into psql variables for the
-- recompress/decompress calls below. NOTE: \gset requires this query to
-- return exactly one row, which holds here since test2 has a single chunk.
SELECT compressed_chunk_schema || '.' || compressed_chunk_name as "COMP_CHUNK_NAME",
        chunk_schema || '.' || chunk_name as "CHUNK_NAME"
FROM compressed_chunk_info_view
WHERE hypertable_name = 'test2' \gset

-- Row count must be unaffected by recompression.
SELECT count(*) from test2;

-- Fold the uncompressed rows back into the compressed chunk.
SELECT recompress_chunk(:'CHUNK_NAME'::regclass);

-- Status should be back to fully compressed after recompression.
SELECT chunk_status,
       chunk_name as "CHUNK_NAME"
FROM compressed_chunk_info_view
WHERE hypertable_name = 'test2' ORDER BY chunk_name;
64
--- insert into a compressed chunk again + a new chunk--
-- The 2020-04-03 row falls outside the existing chunk's range and therefore
-- creates a second (uncompressed) chunk.
INSERT INTO test2 values ( '2020-01-03 11:01:03+00', 20, 11, '33row'),
                         ( '2020-01-03 11:01:06+00', 20, 11, '36row'),
                         ( '2020-01-03 11:02:00+00', 20, 12, '12row'),
                         ( '2020-04-03 00:02:00+00', 30, 13, '3013row');

-- Verify all rows (compressed and uncompressed) are visible.
SELECT time_bucket(INTERVAL '2 hour', timec), b, count(*)
FROM test2
GROUP BY time_bucket(INTERVAL '2 hour', timec), b
ORDER BY 1, 2;

--chunk status should be unordered for the previously compressed chunk
SELECT chunk_status,
       chunk_name as "CHUNK_NAME"
FROM compressed_chunk_info_view
WHERE hypertable_name = 'test2' ORDER BY chunk_name;
81
-- Add a compression policy and run it twice so both test2 chunks
-- (the unordered one and the new 2020-04-03 one) get compressed.
SELECT add_compression_policy AS job_id
  FROM add_compression_policy('test2', '30d'::interval) \gset
CALL run_job(:job_id);
CALL run_job(:job_id);

-- status should be compressed ---
SELECT chunk_status,
       chunk_name as "CHUNK_NAME"
FROM compressed_chunk_info_view
WHERE hypertable_name = 'test2' ORDER BY chunk_name;

-- The calls below are negative tests; disable error-stop so the script
-- continues past the expected failures.
\set ON_ERROR_STOP 0
-- call recompress_chunk when status is not unordered
-- (chunk is fully compressed, so there is nothing to recompress)
SELECT recompress_chunk(:'CHUNK_NAME'::regclass, true);
SELECT recompress_chunk(:'CHUNK_NAME'::regclass, false);

--now decompress it , then try and recompress
-- (recompressing a fully decompressed chunk is also invalid)
SELECT decompress_chunk(:'CHUNK_NAME'::regclass);
SELECT recompress_chunk(:'CHUNK_NAME'::regclass);
\set ON_ERROR_STOP 1
102
-- test recompress behavior driven by background jobs --
-- Fresh hypertable so the policy tests are independent of test2's state.
CREATE TABLE metrics(time timestamptz NOT NULL);
SELECT hypertable_id AS "HYPERTABLE_ID", schema_name, table_name, created FROM create_hypertable('metrics','time') \gset
ALTER TABLE metrics SET (timescaledb.compress);

-- create chunk with some data and compress
INSERT INTO metrics SELECT '2000-01-01' FROM generate_series(1,10);

-- create custom compression job; config deliberately omits the
-- "recompress" boolean to exercise its default behavior
SELECT add_job('_timescaledb_internal.policy_compression','1w',('{"hypertable_id": '||:'HYPERTABLE_ID'||', "compress_after": "@ 7 days"}')::jsonb) AS "JOB_COMPRESS" \gset

-- first call should compress
CALL run_job(:JOB_COMPRESS);
-- 2nd call should do nothing
CALL run_job(:JOB_COMPRESS);
118
---- status should be 1 (fully compressed)
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- do an INSERT so recompress has something to do
INSERT INTO metrics SELECT '2000-01-01';

---- status should be 3 (NOTE: per this test's expectations, 3 marks a
---- compressed chunk that has received new uncompressed rows)
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- should recompress: with "recompress" absent from the config, the
-- compression job recompresses by default
CALL run_job(:JOB_COMPRESS);

---- status should be 1
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- disable recompress in compress job
SELECT alter_job(id,config:=jsonb_set(config,'{recompress}','false')) FROM _timescaledb_config.bgw_job WHERE id = :JOB_COMPRESS;

-- nothing to do
CALL run_job(:JOB_COMPRESS);

---- status should be 1
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- do an INSERT so recompress has something to do
INSERT INTO metrics SELECT '2000-01-01';

---- status should be 3
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- still nothing to do since we disabled recompress
CALL run_job(:JOB_COMPRESS);

---- status should be 3 (unchanged: job must not recompress when disabled)
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- reenable recompress in compress job
SELECT alter_job(id,config:=jsonb_set(config,'{recompress}','true')) FROM _timescaledb_config.bgw_job WHERE id = :JOB_COMPRESS;

-- should recompress now
CALL run_job(:JOB_COMPRESS);

---- status should be 1
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- drop the compression job before testing the dedicated recompression policy
SELECT delete_job(:JOB_COMPRESS);
165
-- Dedicated recompression policy job; maxchunks_to_compress caps the number
-- of chunks handled per run (1 here — metrics has a single chunk).
SELECT add_job('_timescaledb_internal.policy_recompression','1w',('{"hypertable_id": '||:'HYPERTABLE_ID'||', "recompress_after": "@ 7 days", "maxchunks_to_compress": 1}')::jsonb) AS "JOB_RECOMPRESS" \gset

---- status should be 1
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

---- nothing to do yet: chunk is already fully compressed
CALL run_job(:JOB_RECOMPRESS);

---- status should be 1
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- create some work for recompress
INSERT INTO metrics SELECT '2000-01-01';

-- status should be 3
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- the recompression job should pick up the chunk and recompress it
CALL run_job(:JOB_RECOMPRESS);

-- status should be 1
SELECT chunk_status FROM compressed_chunk_info_view WHERE hypertable_name = 'metrics';

-- clean up the job
SELECT delete_job(:JOB_RECOMPRESS);
190