-- Initialization that requires path substitution.
+-- directory paths and dlsuffix are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+
+\set regresslib :libdir '/regress' :dlsuffix
+
CREATE FUNCTION setenv(text, text)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@', 'regress_setenv'
+ AS :'regresslib', 'regress_setenv'
LANGUAGE C STRICT;
CREATE FUNCTION wait_pid(int)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
+\set path :abs_srcdir '/'
+\set fnbody 'SELECT setenv(''PGSERVICEFILE'', ' :'path' ' || $1)'
CREATE FUNCTION set_pgservicefile(text) RETURNS void LANGUAGE SQL
- AS $$SELECT setenv('PGSERVICEFILE', '@abs_srcdir@/' || $1)$$;
+ AS :'fnbody';
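
The rewritten setup leans on three psql facilities: \getenv copies an environment variable into a psql variable, \set concatenates adjacent arguments, and :'var' interpolates a variable as a correctly quoted SQL literal. A minimal sketch with hypothetical names, not taken from the patch:

    \getenv srcdir PG_ABS_SRCDIR                  -- read the exported directory path
    \set datafile :srcdir '/data/example.data'    -- adjacent arguments are concatenated
    \echo :datafile                               -- raw interpolation of the value
    SELECT :'datafile';                           -- quoted literal, embedded quotes escaped
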
-- Initialization that requires path substitution.
+-- directory paths and dlsuffix are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION setenv(text, text)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@', 'regress_setenv'
+ AS :'regresslib', 'regress_setenv'
LANGUAGE C STRICT;
CREATE FUNCTION wait_pid(int)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
+\set path :abs_srcdir '/'
+\set fnbody 'SELECT setenv(''PGSERVICEFILE'', ' :'path' ' || $1)'
CREATE FUNCTION set_pgservicefile(text) RETURNS void LANGUAGE SQL
- AS $$SELECT setenv('PGSERVICEFILE', '@abs_srcdir@/' || $1)$$;
+ AS :'fnbody';
-- Test foreign-data wrapper file_fdw.
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+
-- Clean up in case a prior regression run failed
SET client_min_messages TO 'warning';
DROP ROLE IF EXISTS regress_file_fdw_superuser, regress_file_fdw_user, regress_no_priv_user;
-- Install file_fdw
CREATE EXTENSION file_fdw;
+-- create function to filter unstable results of EXPLAIN
+CREATE FUNCTION explain_filter(text) RETURNS setof text
+LANGUAGE plpgsql AS
+$$
+declare
+ ln text;
+begin
+ for ln in execute $1
+ loop
+ -- Remove the path portion of foreign file names
+ ln := regexp_replace(ln, 'Foreign File: .*/([a-z.]+)$', 'Foreign File: .../\1');
+ return next ln;
+ end loop;
+end;
+$$;
+
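explain_filter exists because EXPLAIN VERBOSE prints the absolute path of each foreign file, which varies across build environments; the regexp keeps only the base name. Applied to a made-up path (illustrative only):

    SELECT regexp_replace(
             'Foreign File: /some/build/dir/data/agg.csv',
             'Foreign File: .*/([a-z.]+)$',
             'Foreign File: .../\1');
    -- returns: Foreign File: .../agg.csv
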
-- regress_file_fdw_superuser owns fdw-related objects
SET ROLE regress_file_fdw_superuser;
CREATE SERVER file_server FOREIGN DATA WRAPPER file_fdw;
'); -- ERROR
CREATE FOREIGN TABLE tbl () SERVER file_server; -- ERROR
+\set filename :abs_srcdir '/data/agg.data'
CREATE FOREIGN TABLE agg_text (
a int2 CHECK (a >= 0),
b float4
) SERVER file_server
-OPTIONS (format 'text', filename '@abs_srcdir@/data/agg.data', delimiter ' ', null '\N');
+OPTIONS (format 'text', filename :'filename', delimiter ' ', null '\N');
GRANT SELECT ON agg_text TO regress_file_fdw_user;
+
+\set filename :abs_srcdir '/data/agg.csv'
CREATE FOREIGN TABLE agg_csv (
a int2,
b float4
) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/agg.csv', header 'true', delimiter ';', quote '@', escape '"', null '');
+OPTIONS (format 'csv', filename :'filename', header 'true', delimiter ';', quote '@', escape '"', null '');
ALTER FOREIGN TABLE agg_csv ADD CHECK (a >= 0);
+
+\set filename :abs_srcdir '/data/agg.bad'
CREATE FOREIGN TABLE agg_bad (
a int2,
b float4
) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/agg.bad', header 'true', delimiter ';', quote '@', escape '"', null '');
+OPTIONS (format 'csv', filename :'filename', header 'true', delimiter ';', quote '@', escape '"', null '');
ALTER FOREIGN TABLE agg_bad ADD CHECK (a >= 0);
-- per-column options tests
+\set filename :abs_srcdir '/data/text.csv'
CREATE FOREIGN TABLE text_csv (
word1 text OPTIONS (force_not_null 'true'),
word2 text OPTIONS (force_not_null 'off'),
word3 text OPTIONS (force_null 'true'),
word4 text OPTIONS (force_null 'off')
) SERVER file_server
-OPTIONS (format 'text', filename '@abs_srcdir@/data/text.csv', null 'NULL');
+OPTIONS (format 'text', filename :'filename', null 'NULL');
SELECT * FROM text_csv; -- ERROR
ALTER FOREIGN TABLE text_csv OPTIONS (SET format 'csv');
\pset null _null_
-- misc query tests
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv');
\t off
PREPARE st(int) AS SELECT * FROM agg_csv WHERE a = $1;
EXECUTE st(100);
-- constraint exclusion tests
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0');
\t off
SELECT * FROM agg_csv WHERE a < 0;
SET constraint_exclusion = 'on';
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0');
\t off
SELECT * FROM agg_csv WHERE a < 0;
RESET constraint_exclusion;
-- declarative partitioning tests
SET ROLE regress_file_fdw_superuser;
CREATE TABLE pt (a int, b text) partition by list (a);
+\set filename :abs_srcdir '/data/list1.csv'
CREATE FOREIGN TABLE p1 partition of pt for values in (1) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/list1.csv', delimiter ',');
+OPTIONS (format 'csv', filename :'filename', delimiter ',');
CREATE TABLE p2 partition of pt for values in (2);
SELECT tableoid::regclass, * FROM pt;
SELECT tableoid::regclass, * FROM p1;
SELECT tableoid::regclass, * FROM p2;
-COPY pt FROM '@abs_srcdir@/data/list2.bad' with (format 'csv', delimiter ','); -- ERROR
-COPY pt FROM '@abs_srcdir@/data/list2.csv' with (format 'csv', delimiter ',');
+\set filename :abs_srcdir '/data/list2.bad'
+COPY pt FROM :'filename' with (format 'csv', delimiter ','); -- ERROR
+\set filename :abs_srcdir '/data/list2.csv'
+COPY pt FROM :'filename' with (format 'csv', delimiter ',');
SELECT tableoid::regclass, * FROM pt;
SELECT tableoid::regclass, * FROM p1;
SELECT tableoid::regclass, * FROM p2;
DROP TABLE pt;
-- generated column tests
+\set filename :abs_srcdir '/data/list1.csv'
CREATE FOREIGN TABLE gft1 (a int, b text, c text GENERATED ALWAYS AS ('foo') STORED) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/list1.csv', delimiter ',');
+OPTIONS (format 'csv', filename :'filename', delimiter ',');
SELECT a, c FROM gft1;
DROP FOREIGN TABLE gft1;
SELECT * FROM agg_text ORDER BY a; -- ERROR
SET ROLE regress_file_fdw_user;
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_text WHERE a > 0;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_text WHERE a > 0');
\t off
-- file FDW allows foreign tables to be accessed without user mapping
DROP USER MAPPING FOR regress_file_fdw_user SERVER file_server;
--
-- Test foreign-data wrapper file_fdw.
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
-- Clean up in case a prior regression run failed
SET client_min_messages TO 'warning';
DROP ROLE IF EXISTS regress_file_fdw_superuser, regress_file_fdw_user, regress_no_priv_user;
CREATE ROLE regress_no_priv_user LOGIN; -- has priv but no user mapping
-- Install file_fdw
CREATE EXTENSION file_fdw;
+-- create function to filter unstable results of EXPLAIN
+CREATE FUNCTION explain_filter(text) RETURNS setof text
+LANGUAGE plpgsql AS
+$$
+declare
+ ln text;
+begin
+ for ln in execute $1
+ loop
+ -- Remove the path portion of foreign file names
+ ln := regexp_replace(ln, 'Foreign File: .*/([a-z.]+)$', 'Foreign File: .../\1');
+ return next ln;
+ end loop;
+end;
+$$;
-- regress_file_fdw_superuser owns fdw-related objects
SET ROLE regress_file_fdw_superuser;
CREATE SERVER file_server FOREIGN DATA WRAPPER file_fdw;
ERROR: COPY null representation cannot use newline or carriage return
CREATE FOREIGN TABLE tbl () SERVER file_server; -- ERROR
ERROR: either filename or program is required for file_fdw foreign tables
+\set filename :abs_srcdir '/data/agg.data'
CREATE FOREIGN TABLE agg_text (
a int2 CHECK (a >= 0),
b float4
) SERVER file_server
-OPTIONS (format 'text', filename '@abs_srcdir@/data/agg.data', delimiter ' ', null '\N');
+OPTIONS (format 'text', filename :'filename', delimiter ' ', null '\N');
GRANT SELECT ON agg_text TO regress_file_fdw_user;
+\set filename :abs_srcdir '/data/agg.csv'
CREATE FOREIGN TABLE agg_csv (
a int2,
b float4
) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/agg.csv', header 'true', delimiter ';', quote '@', escape '"', null '');
+OPTIONS (format 'csv', filename :'filename', header 'true', delimiter ';', quote '@', escape '"', null '');
ALTER FOREIGN TABLE agg_csv ADD CHECK (a >= 0);
+\set filename :abs_srcdir '/data/agg.bad'
CREATE FOREIGN TABLE agg_bad (
a int2,
b float4
) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/agg.bad', header 'true', delimiter ';', quote '@', escape '"', null '');
+OPTIONS (format 'csv', filename :'filename', header 'true', delimiter ';', quote '@', escape '"', null '');
ALTER FOREIGN TABLE agg_bad ADD CHECK (a >= 0);
-- per-column options tests
+\set filename :abs_srcdir '/data/text.csv'
CREATE FOREIGN TABLE text_csv (
word1 text OPTIONS (force_not_null 'true'),
word2 text OPTIONS (force_not_null 'off'),
word3 text OPTIONS (force_null 'true'),
word4 text OPTIONS (force_null 'off')
) SERVER file_server
-OPTIONS (format 'text', filename '@abs_srcdir@/data/text.csv', null 'NULL');
+OPTIONS (format 'text', filename :'filename', null 'NULL');
SELECT * FROM text_csv; -- ERROR
ERROR: COPY force not null available only in CSV mode
ALTER FOREIGN TABLE text_csv OPTIONS (SET format 'csv');
CONTEXT: COPY agg_bad, line 3, column b: "aaa"
-- misc query tests
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv');
Foreign Scan on public.agg_csv
Output: a, b
- Foreign File: @abs_srcdir@/data/agg.csv
+ Foreign File: .../agg.csv
\t off
PREPARE st(int) AS SELECT * FROM agg_csv WHERE a = $1;
ERROR: cannot insert into foreign table "agg_csv"
-- constraint exclusion tests
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0');
Foreign Scan on public.agg_csv
Output: a, b
Filter: (agg_csv.a < 0)
- Foreign File: @abs_srcdir@/data/agg.csv
+ Foreign File: .../agg.csv
\t off
SELECT * FROM agg_csv WHERE a < 0;
SET constraint_exclusion = 'on';
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_csv WHERE a < 0');
Result
Output: a, b
One-Time Filter: false
-- declarative partitioning tests
SET ROLE regress_file_fdw_superuser;
CREATE TABLE pt (a int, b text) partition by list (a);
+\set filename :abs_srcdir '/data/list1.csv'
CREATE FOREIGN TABLE p1 partition of pt for values in (1) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/list1.csv', delimiter ',');
+OPTIONS (format 'csv', filename :'filename', delimiter ',');
CREATE TABLE p2 partition of pt for values in (2);
SELECT tableoid::regclass, * FROM pt;
tableoid | a | b
----------+---+---
(0 rows)
-COPY pt FROM '@abs_srcdir@/data/list2.bad' with (format 'csv', delimiter ','); -- ERROR
+\set filename :abs_srcdir '/data/list2.bad'
+COPY pt FROM :'filename' with (format 'csv', delimiter ','); -- ERROR
ERROR: cannot insert into foreign table "p1"
CONTEXT: COPY pt, line 2: "1,qux"
-COPY pt FROM '@abs_srcdir@/data/list2.csv' with (format 'csv', delimiter ',');
+\set filename :abs_srcdir '/data/list2.csv'
+COPY pt FROM :'filename' with (format 'csv', delimiter ',');
SELECT tableoid::regclass, * FROM pt;
tableoid | a | b
----------+---+-----
DROP TABLE pt;
-- generated column tests
+\set filename :abs_srcdir '/data/list1.csv'
CREATE FOREIGN TABLE gft1 (a int, b text, c text GENERATED ALWAYS AS ('foo') STORED) SERVER file_server
-OPTIONS (format 'csv', filename '@abs_srcdir@/data/list1.csv', delimiter ',');
+OPTIONS (format 'csv', filename :'filename', delimiter ',');
SELECT a, c FROM gft1;
a | c
---+--------
ERROR: permission denied for foreign table agg_text
SET ROLE regress_file_fdw_user;
\t on
-EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_text WHERE a > 0;
+SELECT explain_filter('EXPLAIN (VERBOSE, COSTS FALSE) SELECT * FROM agg_text WHERE a > 0');
Foreign Scan on public.agg_text
Output: a, b
Filter: (agg_text.a > 0)
- Foreign File: @abs_srcdir@/data/agg.data
+ Foreign File: .../agg.data
\t off
-- file FDW allows foreign tables to be accessed without user mapping
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
+-- set up file names to use
+\set srcfilename :abs_srcdir '/data/copy1.data'
+\set destfilename :abs_builddir '/results/copy1.data'
+
CREATE TABLE copy1 (a int, b float);
-- COPY TO/FROM not authorized from client.
-- Valid cases
-- COPY FROM
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_srcdir@/data/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'srcfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_srcdir@/data/copy1.data''';
-END;
-$$;
+\set cmd 'COPY copy1 FROM ' :'srcfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
-- COPY TO
-- Copy the data externally once, then process it back to the table.
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 TO '@abs_builddir@/results/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 TO ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_builddir@/results/copy1.data';
-END;
-$$;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_builddir@/results/copy1.data''';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
+
+\set cmd 'COPY copy1 FROM ' :'destfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
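
Because :'srcfilename' interpolates as an already-quoted literal, the \set commands build the DO bodies as plain text with all quoting resolved; assuming a hypothetical PG_ABS_SRCDIR of /tmp/src, the first body ends up as:

    \echo :dobody
    -- BEGIN COPY copy1 FROM '/tmp/src/data/copy1.data'; END

and, for the dynamic variant, :'cmd' doubles the inner quotes so that EXECUTE receives a valid string literal:

    \echo :cmd
    -- COPY copy1 FROM '/tmp/src/data/copy1.data'
    \echo :dobody
    -- BEGIN EXECUTE 'COPY copy1 FROM ''/tmp/src/data/copy1.data'''; END

DO then takes the body through :'dobody' as an ordinary single-quoted literal, so no dollar quoting is needed and no @abs_srcdir@ placeholder survives.
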
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+-- set up file names to use
+\set srcfilename :abs_srcdir '/data/copy1.data'
+\set destfilename :abs_builddir '/results/copy1.data'
CREATE TABLE copy1 (a int, b float);
-- COPY TO/FROM not authorized from client.
DO LANGUAGE plpgsql $$
CONTEXT: PL/pgSQL function inline_code_block line 3 at EXECUTE
-- Valid cases
-- COPY FROM
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_srcdir@/data/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'srcfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
a | b
---+-----
(3 rows)
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_srcdir@/data/copy1.data''';
-END;
-$$;
+\set cmd 'COPY copy1 FROM ' :'srcfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
a | b
---+-----
-- COPY TO
-- Copy the data externally once, then process it back to the table.
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 TO '@abs_builddir@/results/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 TO ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_builddir@/results/copy1.data';
-END;
-$$;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_builddir@/results/copy1.data''';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
+\set cmd 'COPY copy1 FROM ' :'destfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
a | b
---+-----
-- - EXCLUDE clauses
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+
--
-- DEFAULT syntax
--
CONSTRAINT COPY_CON
CHECK (x > 3 AND y <> 'check failed' AND x < 7 ));
-COPY COPY_TBL FROM '@abs_srcdir@/data/constro.data';
+\set filename :abs_srcdir '/data/constro.data'
+COPY COPY_TBL FROM :'filename';
SELECT * FROM COPY_TBL;
-COPY COPY_TBL FROM '@abs_srcdir@/data/constrf.data';
+\set filename :abs_srcdir '/data/constrf.data'
+COPY COPY_TBL FROM :'filename';
SELECT * FROM COPY_TBL;
-- COPY
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
-COPY aggtest FROM '@abs_srcdir@/data/agg.data';
+\set filename :abs_srcdir '/data/agg.data'
+COPY aggtest FROM :'filename';
-COPY onek FROM '@abs_srcdir@/data/onek.data';
+\set filename :abs_srcdir '/data/onek.data'
+COPY onek FROM :'filename';
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
+COPY onek FROM :'filename';
-COPY tenk1 FROM '@abs_srcdir@/data/tenk.data';
+\set filename :abs_srcdir '/data/tenk.data'
+COPY tenk1 FROM :'filename';
-COPY slow_emp4000 FROM '@abs_srcdir@/data/rect.data';
+\set filename :abs_srcdir '/data/rect.data'
+COPY slow_emp4000 FROM :'filename';
-COPY person FROM '@abs_srcdir@/data/person.data';
+\set filename :abs_srcdir '/data/person.data'
+COPY person FROM :'filename';
-COPY emp FROM '@abs_srcdir@/data/emp.data';
+\set filename :abs_srcdir '/data/emp.data'
+COPY emp FROM :'filename';
-COPY student FROM '@abs_srcdir@/data/student.data';
+\set filename :abs_srcdir '/data/student.data'
+COPY student FROM :'filename';
-COPY stud_emp FROM '@abs_srcdir@/data/stud_emp.data';
+\set filename :abs_srcdir '/data/stud_emp.data'
+COPY stud_emp FROM :'filename';
-COPY road FROM '@abs_srcdir@/data/streets.data';
+\set filename :abs_srcdir '/data/streets.data'
+COPY road FROM :'filename';
-COPY real_city FROM '@abs_srcdir@/data/real_city.data';
+\set filename :abs_srcdir '/data/real_city.data'
+COPY real_city FROM :'filename';
-COPY hash_i4_heap FROM '@abs_srcdir@/data/hash.data';
+\set filename :abs_srcdir '/data/hash.data'
+COPY hash_i4_heap FROM :'filename';
-COPY hash_name_heap FROM '@abs_srcdir@/data/hash.data';
+COPY hash_name_heap FROM :'filename';
-COPY hash_txt_heap FROM '@abs_srcdir@/data/hash.data';
+COPY hash_txt_heap FROM :'filename';
-COPY hash_f8_heap FROM '@abs_srcdir@/data/hash.data';
+COPY hash_f8_heap FROM :'filename';
-COPY test_tsvector FROM '@abs_srcdir@/data/tsearch.data';
+\set filename :abs_srcdir '/data/tsearch.data'
+COPY test_tsvector FROM :'filename';
-COPY testjsonb FROM '@abs_srcdir@/data/jsonb.data';
+\set filename :abs_srcdir '/data/jsonb.data'
+COPY testjsonb FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
--- COPY hash_ovfl_heap FROM '@abs_srcdir@/data/hashovfl.data';
+-- \set filename :abs_srcdir '/data/hashovfl.data'
+-- COPY hash_ovfl_heap FROM :'filename';
-COPY bt_i4_heap FROM '@abs_srcdir@/data/desc.data';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_i4_heap FROM :'filename';
-COPY bt_name_heap FROM '@abs_srcdir@/data/hash.data';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_name_heap FROM :'filename';
-COPY bt_txt_heap FROM '@abs_srcdir@/data/desc.data';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_txt_heap FROM :'filename';
-COPY bt_f8_heap FROM '@abs_srcdir@/data/hash.data';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_f8_heap FROM :'filename';
-COPY array_op_test FROM '@abs_srcdir@/data/array.data';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_op_test FROM :'filename';
-COPY array_index_op_test FROM '@abs_srcdir@/data/array.data';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_index_op_test FROM :'filename';
-- analyze all the data we just loaded, to ensure plan consistency
-- in later tests
insert into copytest values('Mac',E'abc\rdef',3);
insert into copytest values(E'esc\\ape',E'a\\r\\\r\\\n\\nb',4);
-copy copytest to '@abs_builddir@/results/copytest.csv' csv;
+\set filename :abs_builddir '/results/copytest.csv'
+copy copytest to :'filename' csv;
create temp table copytest2 (like copytest);
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv;
+copy copytest2 from :'filename' csv;
select * from copytest except select * from copytest2;
-- same test but with an escape char different from quote char
-copy copytest to '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
+copy copytest to :'filename' csv quote '''' escape E'\\';
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
+copy copytest2 from :'filename' csv quote '''' escape E'\\';
select * from copytest except select * from copytest2;
insert into parted_copytest select x,2,'Two' from generate_series(1001,1010) x;
insert into parted_copytest select x,1,'One' from generate_series(1011,1020) x;
-copy (select * from parted_copytest order by a) to '@abs_builddir@/results/parted_copytest.csv';
+\set filename :abs_builddir '/results/parted_copytest.csv'
+copy (select * from parted_copytest order by a) to :'filename';
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
-- Ensure COPY FREEZE errors for partitioned tables.
begin;
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv' (freeze);
+copy parted_copytest from :'filename' (freeze);
rollback;
select tableoid::regclass,count(*),sum(a) from parted_copytest
for each row
execute procedure part_ins_func();
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
select tableoid::regclass,count(*),sum(a) from parted_copytest
group by tableoid order by tableoid::regclass::name;
-- Generate COPY FROM report with FILE, with some excluded tuples.
truncate tab_progress_reporting;
-copy tab_progress_reporting from '@abs_srcdir@/data/emp.data'
+\set filename :abs_srcdir '/data/emp.data'
+copy tab_progress_reporting from :'filename'
where (salary < 2000);
drop trigger check_after_tab_progress_reporting on tab_progress_reporting;
-- CREATE_FUNCTION_0
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+
+\set autoinclib :libdir '/autoinc' :dlsuffix
+\set refintlib :libdir '/refint' :dlsuffix
+\set regresslib :libdir '/regress' :dlsuffix
+
-- Create a bunch of C functions that will be used by later tests:
CREATE FUNCTION check_primary_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION check_foreign_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION autoinc ()
RETURNS trigger
- AS '@libdir@/autoinc@DLSUFFIX@'
+ AS :'autoinclib'
LANGUAGE C;
CREATE FUNCTION trigger_return_old ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION ttdummy ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION set_ttdummy (int4)
RETURNS int4
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
- AS '@libdir@/regress@DLSUFFIX@', 'test_fdw_handler'
+ AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
CREATE FUNCTION test_support_func(internal)
RETURNS internal
- AS '@libdir@/regress@DLSUFFIX@', 'test_support_func'
+ AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@', 'test_opclass_options_func'
+ AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
- AS '@libdir@/regress@DLSUFFIX@', 'test_enc_conversion'
+ AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@', 'binary_coercible'
+ AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
-- Things that shouldn't work:
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
AS 'nosuchfile';
+-- To produce stable regression test output, we have to filter the name
+-- of the regresslib file out of the error message in this test.
+\set VERBOSITY sqlstate
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
- AS '@libdir@/regress@DLSUFFIX@', 'nosuchsymbol';
+ AS :'regresslib', 'nosuchsymbol';
+\set VERBOSITY default
+SELECT regexp_replace(:'LAST_ERROR_MESSAGE', 'file ".*"', 'file "..."');
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE internal
AS 'nosuch';
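
The same idiom on an unrelated error, just to show the moving parts (illustrative, not part of the patch): with VERBOSITY set to sqlstate the error report collapses to its SQLSTATE, while psql still records the full message in LAST_ERROR_MESSAGE for later, filtered display.

    \set VERBOSITY sqlstate
    SELECT 1/0;                 -- reported as: ERROR:  22012
    \set VERBOSITY default
    \echo :LAST_ERROR_MESSAGE   -- division by zero
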
-- CREATE_FUNCTION_1
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+
+\set regresslib :libdir '/regress' :dlsuffix
+
-- Create C functions needed by create_type.sql
CREATE FUNCTION widget_in(cstring)
RETURNS widget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION widget_out(widget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
--
-- CREATE_FUNCTION_2
--
+
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+
+\set regresslib :libdir '/regress' :dlsuffix
+
+
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION overpaid(emp)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
--
-- Function dynamic loading
--
-LOAD '@libdir@/regress@DLSUFFIX@';
+LOAD :'regresslib';
-- Test large object support
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
-- ensure consistent test output regardless of the default bytea format
SET bytea_output TO escape;
SELECT lo_open(loid, x'40000'::int) from lotest_stash_values;
ABORT;
-DO $$
-DECLARE
- loid oid;
-BEGIN
- SELECT tbl.loid INTO loid FROM lotest_stash_values tbl;
- PERFORM lo_export(loid, '@abs_builddir@/results/invalid/path');
-EXCEPTION
- WHEN UNDEFINED_FILE THEN RAISE NOTICE 'could not open file, as expected';
-END;
-$$;
+\set filename :abs_builddir '/results/invalid/path'
+\set dobody 'DECLARE loid oid; BEGIN '
+\set dobody :dobody 'SELECT tbl.loid INTO loid FROM lotest_stash_values tbl; '
+\set dobody :dobody 'PERFORM lo_export(loid, ' :'filename' '); '
+\set dobody :dobody 'EXCEPTION WHEN UNDEFINED_FILE THEN '
+\set dobody :dobody 'RAISE NOTICE ''could not open file, as expected''; END'
+DO :'dobody';
-- Test truncation.
BEGIN;
TRUNCATE lotest_stash_values;
-INSERT INTO lotest_stash_values (loid) SELECT lo_import('@abs_srcdir@/data/tenk.data');
+\set filename :abs_srcdir '/data/tenk.data'
+INSERT INTO lotest_stash_values (loid) SELECT lo_import(:'filename');
BEGIN;
UPDATE lotest_stash_values SET fd=lo_open(loid, CAST(x'20000' | x'40000' AS integer));
SELECT lo_close(fd) FROM lotest_stash_values;
END;
-SELECT lo_export(loid, '@abs_builddir@/results/lotest.txt') FROM lotest_stash_values;
+\set filename :abs_builddir '/results/lotest.txt'
+SELECT lo_export(loid, :'filename') FROM lotest_stash_values;
-\lo_import '@abs_builddir@/results/lotest.txt'
+\lo_import :filename
\set newloid :LASTOID
-- just make sure \lo_export does not barf
-\lo_export :newloid '@abs_builddir@/results/lotest2.txt'
+\set filename :abs_builddir '/results/lotest2.txt'
+\lo_export :newloid :filename
-- This is a hack to test that export/import are reversible
-- This uses knowledge about the inner workings of large object mechanism
\lo_unlink :newloid
-\lo_import '@abs_builddir@/results/lotest.txt'
+\set filename :abs_builddir '/results/lotest.txt'
+\lo_import :filename
\set newloid_1 :LASTOID
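
Two interpolation forms appear in this hunk: SQL statements take the quoted form (lo_export(loid, :'filename')) while backslash commands take the raw form (\lo_import :filename), since meta-commands parse their own arguments and the regression paths are assumed to need no quoting. A compressed illustration with a hypothetical path:

    \set filename '/tmp/build/results/lotest.txt'
    \echo :'filename'     -- '/tmp/build/results/lotest.txt'  (quoted, for use in SQL)
    \echo :filename       -- /tmp/build/results/lotest.txt    (raw, for meta-commands)
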
-- MISC
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
--
-- BTREE
--
--
-- copy
--
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
+COPY onek FROM :'filename';
SELECT unique1 FROM onek WHERE unique1 < 2 ORDER BY unique1;
DELETE FROM onek2;
-COPY onek2 FROM '@abs_builddir@/results/onek.data';
+COPY onek2 FROM :'filename';
SELECT unique1 FROM onek2 WHERE unique1 < 2 ORDER BY unique1;
-COPY BINARY stud_emp TO '@abs_builddir@/results/stud_emp.data';
+\set filename :abs_builddir '/results/stud_emp.data'
+COPY BINARY stud_emp TO :'filename';
DELETE FROM stud_emp;
-COPY BINARY stud_emp FROM '@abs_builddir@/results/stud_emp.data';
+COPY BINARY stud_emp FROM :'filename';
SELECT * FROM stud_emp;
+-- directory paths are passed to us in environment variables
+\getenv abs_builddir PG_ABS_BUILDDIR
+
+\set testtablespace :abs_builddir '/testtablespace'
+
-- create a tablespace using WITH clause
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (some_nonexistent_parameter = true); -- fail
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (random_page_cost = 3.0); -- ok
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (some_nonexistent_parameter = true); -- fail
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (random_page_cost = 3.0); -- ok
-- check to see the parameter was used
SELECT spcoptions FROM pg_tablespace WHERE spcname = 'regress_tblspacewith';
DROP TABLESPACE regress_tblspacewith;
-- create a tablespace we can use
-CREATE TABLESPACE regress_tblspace LOCATION '@testtablespace@';
+CREATE TABLESPACE regress_tblspace LOCATION :'testtablespace';
-- try setting and resetting some properties for the new tablespace
ALTER TABLESPACE regress_tblspace SET (random_page_cost = 1.0, seq_page_cost = 1.1);
-- - UNIQUE clauses
-- - EXCLUDE clauses
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
--
-- DEFAULT syntax
--
CREATE TABLE COPY_TBL (x INT, y TEXT, z INT,
CONSTRAINT COPY_CON
CHECK (x > 3 AND y <> 'check failed' AND x < 7 ));
-COPY COPY_TBL FROM '@abs_srcdir@/data/constro.data';
+\set filename :abs_srcdir '/data/constro.data'
+COPY COPY_TBL FROM :'filename';
SELECT * FROM COPY_TBL;
x | y | z
---+---------------+---
6 | OK | 4
(2 rows)
-COPY COPY_TBL FROM '@abs_srcdir@/data/constrf.data';
+\set filename :abs_srcdir '/data/constrf.data'
+COPY COPY_TBL FROM :'filename';
ERROR: new row for relation "copy_tbl" violates check constraint "copy_con"
DETAIL: Failing row contains (7, check failed, 6).
CONTEXT: COPY copy_tbl, line 2: "7 check failed 6"
--
-- COPY
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
-COPY aggtest FROM '@abs_srcdir@/data/agg.data';
-COPY onek FROM '@abs_srcdir@/data/onek.data';
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_srcdir '/data/agg.data'
+COPY aggtest FROM :'filename';
+\set filename :abs_srcdir '/data/onek.data'
+COPY onek FROM :'filename';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
-COPY tenk1 FROM '@abs_srcdir@/data/tenk.data';
-COPY slow_emp4000 FROM '@abs_srcdir@/data/rect.data';
-COPY person FROM '@abs_srcdir@/data/person.data';
-COPY emp FROM '@abs_srcdir@/data/emp.data';
-COPY student FROM '@abs_srcdir@/data/student.data';
-COPY stud_emp FROM '@abs_srcdir@/data/stud_emp.data';
-COPY road FROM '@abs_srcdir@/data/streets.data';
-COPY real_city FROM '@abs_srcdir@/data/real_city.data';
-COPY hash_i4_heap FROM '@abs_srcdir@/data/hash.data';
-COPY hash_name_heap FROM '@abs_srcdir@/data/hash.data';
-COPY hash_txt_heap FROM '@abs_srcdir@/data/hash.data';
-COPY hash_f8_heap FROM '@abs_srcdir@/data/hash.data';
-COPY test_tsvector FROM '@abs_srcdir@/data/tsearch.data';
-COPY testjsonb FROM '@abs_srcdir@/data/jsonb.data';
+COPY onek FROM :'filename';
+\set filename :abs_srcdir '/data/tenk.data'
+COPY tenk1 FROM :'filename';
+\set filename :abs_srcdir '/data/rect.data'
+COPY slow_emp4000 FROM :'filename';
+\set filename :abs_srcdir '/data/person.data'
+COPY person FROM :'filename';
+\set filename :abs_srcdir '/data/emp.data'
+COPY emp FROM :'filename';
+\set filename :abs_srcdir '/data/student.data'
+COPY student FROM :'filename';
+\set filename :abs_srcdir '/data/stud_emp.data'
+COPY stud_emp FROM :'filename';
+\set filename :abs_srcdir '/data/streets.data'
+COPY road FROM :'filename';
+\set filename :abs_srcdir '/data/real_city.data'
+COPY real_city FROM :'filename';
+\set filename :abs_srcdir '/data/hash.data'
+COPY hash_i4_heap FROM :'filename';
+COPY hash_name_heap FROM :'filename';
+COPY hash_txt_heap FROM :'filename';
+COPY hash_f8_heap FROM :'filename';
+\set filename :abs_srcdir '/data/tsearch.data'
+COPY test_tsvector FROM :'filename';
+\set filename :abs_srcdir '/data/jsonb.data'
+COPY testjsonb FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
--- COPY hash_ovfl_heap FROM '@abs_srcdir@/data/hashovfl.data';
-COPY bt_i4_heap FROM '@abs_srcdir@/data/desc.data';
-COPY bt_name_heap FROM '@abs_srcdir@/data/hash.data';
-COPY bt_txt_heap FROM '@abs_srcdir@/data/desc.data';
-COPY bt_f8_heap FROM '@abs_srcdir@/data/hash.data';
-COPY array_op_test FROM '@abs_srcdir@/data/array.data';
-COPY array_index_op_test FROM '@abs_srcdir@/data/array.data';
+-- \set filename :abs_srcdir '/data/hashovfl.data'
+-- COPY hash_ovfl_heap FROM :'filename';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_i4_heap FROM :'filename';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_name_heap FROM :'filename';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_txt_heap FROM :'filename';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_f8_heap FROM :'filename';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_op_test FROM :'filename';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_index_op_test FROM :'filename';
-- analyze all the data we just loaded, to ensure plan consistency
-- in later tests
ANALYZE aggtest;
insert into copytest values('Unix',E'abc\ndef',2);
insert into copytest values('Mac',E'abc\rdef',3);
insert into copytest values(E'esc\\ape',E'a\\r\\\r\\\n\\nb',4);
-copy copytest to '@abs_builddir@/results/copytest.csv' csv;
+\set filename :abs_builddir '/results/copytest.csv'
+copy copytest to :'filename' csv;
create temp table copytest2 (like copytest);
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv;
+copy copytest2 from :'filename' csv;
select * from copytest except select * from copytest2;
style | test | filler
-------+------+--------
truncate copytest2;
-- same test but with an escape char different from quote char
-copy copytest to '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
+copy copytest to :'filename' csv quote '''' escape E'\\';
+copy copytest2 from :'filename' csv quote '''' escape E'\\';
select * from copytest except select * from copytest2;
style | test | filler
-------+------+--------
insert into parted_copytest select x,1,'One' from generate_series(1,1000) x;
insert into parted_copytest select x,2,'Two' from generate_series(1001,1010) x;
insert into parted_copytest select x,1,'One' from generate_series(1011,1020) x;
-copy (select * from parted_copytest order by a) to '@abs_builddir@/results/parted_copytest.csv';
+\set filename :abs_builddir '/results/parted_copytest.csv'
+copy (select * from parted_copytest order by a) to :'filename';
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
-- Ensure COPY FREEZE errors for partitioned tables.
begin;
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv' (freeze);
+copy parted_copytest from :'filename' (freeze);
ERROR: cannot perform COPY FREEZE on a partitioned table
rollback;
select tableoid::regclass,count(*),sum(a) from parted_copytest
before insert on parted_copytest_a2
for each row
execute procedure part_ins_func();
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
select tableoid::regclass,count(*),sum(a) from parted_copytest
group by tableoid order by tableoid::regclass::name;
tableoid | count | sum
INFO: progress: {"type": "PIPE", "command": "COPY FROM", "relname": "tab_progress_reporting", "has_bytes_total": false, "tuples_excluded": 0, "tuples_processed": 3, "has_bytes_processed": true}
-- Generate COPY FROM report with FILE, with some excluded tuples.
truncate tab_progress_reporting;
-copy tab_progress_reporting from '@abs_srcdir@/data/emp.data'
+\set filename :abs_srcdir '/data/emp.data'
+copy tab_progress_reporting from :'filename'
where (salary < 2000);
INFO: progress: {"type": "FILE", "command": "COPY FROM", "relname": "tab_progress_reporting", "has_bytes_total": true, "tuples_excluded": 1, "tuples_processed": 2, "has_bytes_processed": true}
drop trigger check_after_tab_progress_reporting on tab_progress_reporting;
--
-- CREATE_FUNCTION_0
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+\set autoinclib :libdir '/autoinc' :dlsuffix
+\set refintlib :libdir '/refint' :dlsuffix
+\set regresslib :libdir '/regress' :dlsuffix
-- Create a bunch of C functions that will be used by later tests:
CREATE FUNCTION check_primary_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION check_foreign_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION autoinc ()
RETURNS trigger
- AS '@libdir@/autoinc@DLSUFFIX@'
+ AS :'autoinclib'
LANGUAGE C;
CREATE FUNCTION trigger_return_old ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION ttdummy ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION set_ttdummy (int4)
RETURNS int4
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
- AS '@libdir@/regress@DLSUFFIX@', 'test_fdw_handler'
+ AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
CREATE FUNCTION test_support_func(internal)
RETURNS internal
- AS '@libdir@/regress@DLSUFFIX@', 'test_support_func'
+ AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@', 'test_opclass_options_func'
+ AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
- AS '@libdir@/regress@DLSUFFIX@', 'test_enc_conversion'
+ AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@', 'binary_coercible'
+ AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
-- Things that shouldn't work:
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
AS 'nosuchfile';
ERROR: could not access file "nosuchfile": No such file or directory
+-- To produce stable regression test output, we have to filter the name
+-- of the regresslib file out of the error message in this test.
+\set VERBOSITY sqlstate
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
- AS '@libdir@/regress@DLSUFFIX@', 'nosuchsymbol';
-ERROR: could not find function "nosuchsymbol" in file "@libdir@/regress@DLSUFFIX@"
+ AS :'regresslib', 'nosuchsymbol';
+ERROR: 42883
+\set VERBOSITY default
+SELECT regexp_replace(:'LAST_ERROR_MESSAGE', 'file ".*"', 'file "..."');
+ regexp_replace
+------------------------------------------------------
+ could not find function "nosuchsymbol" in file "..."
+(1 row)
+
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE internal
AS 'nosuch';
ERROR: there is no built-in function named "nosuch"
--
-- CREATE_FUNCTION_1
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+\set regresslib :libdir '/regress' :dlsuffix
-- Create C functions needed by create_type.sql
CREATE FUNCTION widget_in(cstring)
RETURNS widget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "widget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION widget_out(widget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type widget is only a shell
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "city_budget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type city_budget is only a shell
--
-- CREATE_FUNCTION_2
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
LANGUAGE SQL;
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION overpaid(emp)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
--
-- Function dynamic loading
--
-LOAD '@libdir@/regress@DLSUFFIX@';
+LOAD :'regresslib';
--
-- Test large object support
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
-- ensure consistent test output regardless of the default bytea format
SET bytea_output TO escape;
-- Load a file
(1 row)
ABORT;
-DO $$
-DECLARE
- loid oid;
-BEGIN
- SELECT tbl.loid INTO loid FROM lotest_stash_values tbl;
- PERFORM lo_export(loid, '@abs_builddir@/results/invalid/path');
-EXCEPTION
- WHEN UNDEFINED_FILE THEN RAISE NOTICE 'could not open file, as expected';
-END;
-$$;
+\set filename :abs_builddir '/results/invalid/path'
+\set dobody 'DECLARE loid oid; BEGIN '
+\set dobody :dobody 'SELECT tbl.loid INTO loid FROM lotest_stash_values tbl; '
+\set dobody :dobody 'PERFORM lo_export(loid, ' :'filename' '); '
+\set dobody :dobody 'EXCEPTION WHEN UNDEFINED_FILE THEN '
+\set dobody :dobody 'RAISE NOTICE ''could not open file, as expected''; END'
+DO :'dobody';
NOTICE: could not open file, as expected
-- Test truncation.
BEGIN;
(1 row)
TRUNCATE lotest_stash_values;
-INSERT INTO lotest_stash_values (loid) SELECT lo_import('@abs_srcdir@/data/tenk.data');
+\set filename :abs_srcdir '/data/tenk.data'
+INSERT INTO lotest_stash_values (loid) SELECT lo_import(:'filename');
BEGIN;
UPDATE lotest_stash_values SET fd=lo_open(loid, CAST(x'20000' | x'40000' AS integer));
-- verify length of large object
(1 row)
END;
-SELECT lo_export(loid, '@abs_builddir@/results/lotest.txt') FROM lotest_stash_values;
+\set filename :abs_builddir '/results/lotest.txt'
+SELECT lo_export(loid, :'filename') FROM lotest_stash_values;
lo_export
-----------
1
(1 row)
-\lo_import '@abs_builddir@/results/lotest.txt'
+\lo_import :filename
\set newloid :LASTOID
-- just make sure \lo_export does not barf
-\lo_export :newloid '@abs_builddir@/results/lotest2.txt'
+\set filename :abs_builddir '/results/lotest2.txt'
+\lo_export :newloid :filename
-- This is a hack to test that export/import are reversible
-- This uses knowledge about the inner workings of large object mechanism
-- which should not be used outside it. This makes it a HACK
TRUNCATE lotest_stash_values;
\lo_unlink :newloid
-\lo_import '@abs_builddir@/results/lotest.txt'
+\set filename :abs_builddir '/results/lotest.txt'
+\lo_import :filename
\set newloid_1 :LASTOID
SELECT lo_from_bytea(0, lo_get(:newloid_1)) AS newloid_2
\gset
--
-- Test large object support
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
-- ensure consistent test output regardless of the default bytea format
SET bytea_output TO escape;
-- Load a file
(1 row)
ABORT;
-DO $$
-DECLARE
- loid oid;
-BEGIN
- SELECT tbl.loid INTO loid FROM lotest_stash_values tbl;
- PERFORM lo_export(loid, '@abs_builddir@/results/invalid/path');
-EXCEPTION
- WHEN UNDEFINED_FILE THEN RAISE NOTICE 'could not open file, as expected';
-END;
-$$;
+\set filename :abs_builddir '/results/invalid/path'
+\set dobody 'DECLARE loid oid; BEGIN '
+\set dobody :dobody 'SELECT tbl.loid INTO loid FROM lotest_stash_values tbl; '
+\set dobody :dobody 'PERFORM lo_export(loid, ' :'filename' '); '
+\set dobody :dobody 'EXCEPTION WHEN UNDEFINED_FILE THEN '
+\set dobody :dobody 'RAISE NOTICE ''could not open file, as expected''; END'
+DO :'dobody';
NOTICE: could not open file, as expected
-- Test truncation.
BEGIN;
(1 row)
TRUNCATE lotest_stash_values;
-INSERT INTO lotest_stash_values (loid) SELECT lo_import('@abs_srcdir@/data/tenk.data');
+\set filename :abs_srcdir '/data/tenk.data'
+INSERT INTO lotest_stash_values (loid) SELECT lo_import(:'filename');
BEGIN;
UPDATE lotest_stash_values SET fd=lo_open(loid, CAST(x'20000' | x'40000' AS integer));
-- verify length of large object
(1 row)
END;
-SELECT lo_export(loid, '@abs_builddir@/results/lotest.txt') FROM lotest_stash_values;
+\set filename :abs_builddir '/results/lotest.txt'
+SELECT lo_export(loid, :'filename') FROM lotest_stash_values;
lo_export
-----------
1
(1 row)
-\lo_import '@abs_builddir@/results/lotest.txt'
+\lo_import :filename
\set newloid :LASTOID
-- just make sure \lo_export does not barf
-\lo_export :newloid '@abs_builddir@/results/lotest2.txt'
+\set filename :abs_builddir '/results/lotest2.txt'
+\lo_export :newloid :filename
-- This is a hack to test that export/import are reversible
-- This uses knowledge about the inner workings of large object mechanism
-- which should not be used outside it. This makes it a HACK
TRUNCATE lotest_stash_values;
\lo_unlink :newloid
-\lo_import '@abs_builddir@/results/lotest.txt'
+\set filename :abs_builddir '/results/lotest.txt'
+\lo_import :filename
\set newloid_1 :LASTOID
SELECT lo_from_bytea(0, lo_get(:newloid_1)) AS newloid_2
\gset
--
-- MISC
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
--
-- BTREE
--
--
-- copy
--
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
+COPY onek FROM :'filename';
SELECT unique1 FROM onek WHERE unique1 < 2 ORDER BY unique1;
unique1
---------
(2 rows)
DELETE FROM onek2;
-COPY onek2 FROM '@abs_builddir@/results/onek.data';
+COPY onek2 FROM :'filename';
SELECT unique1 FROM onek2 WHERE unique1 < 2 ORDER BY unique1;
unique1
---------
1
(2 rows)
-COPY BINARY stud_emp TO '@abs_builddir@/results/stud_emp.data';
+\set filename :abs_builddir '/results/stud_emp.data'
+COPY BINARY stud_emp TO :'filename';
DELETE FROM stud_emp;
-COPY BINARY stud_emp FROM '@abs_builddir@/results/stud_emp.data';
+COPY BINARY stud_emp FROM :'filename';
SELECT * FROM stud_emp;
name | age | location | salary | manager | gpa | percent
-------+-----+------------+--------+---------+-----+---------
+-- directory paths are passed to us in environment variables
+\getenv abs_builddir PG_ABS_BUILDDIR
+\set testtablespace :abs_builddir '/testtablespace'
-- create a tablespace using WITH clause
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (some_nonexistent_parameter = true); -- fail
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (some_nonexistent_parameter = true); -- fail
ERROR: unrecognized parameter "some_nonexistent_parameter"
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (random_page_cost = 3.0); -- ok
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (random_page_cost = 3.0); -- ok
-- check to see the parameter was used
SELECT spcoptions FROM pg_tablespace WHERE spcname = 'regress_tblspacewith';
spcoptions
-- drop the tablespace so we can re-use the location
DROP TABLESPACE regress_tblspacewith;
-- create a tablespace we can use
-CREATE TABLESPACE regress_tblspace LOCATION '@testtablespace@';
+CREATE TABLESPACE regress_tblspace LOCATION :'testtablespace';
-- try setting and resetting some properties for the new tablespace
ALTER TABLESPACE regress_tblspace SET (random_page_cost = 1.0, seq_page_cost = 1.1);
ALTER TABLESPACE regress_tblspace SET (some_nonexistent_parameter = true); -- fail
*/
setenv("PGAPPNAME", "pg_regress", 1);
+ /*
+ * Set variables that the test scripts may need to refer to.
+ */
+ setenv("PG_ABS_SRCDIR", inputdir, 1);
+ setenv("PG_ABS_BUILDDIR", outputdir, 1);
+ setenv("PG_LIBDIR", dlpath, 1);
+ setenv("PG_DLSUFFIX", DLSUFFIX, 1);
+
if (nolocale)
{
/*