/*
 * Loading from a ZIP archive containing CSV files. The full test can be
 * done using the archive found at
 * http://geolite.maxmind.com/download/geoip/database/GeoLiteCity_CSV/GeoLiteCity-latest.zip
 *
 * And a very light version of this data set is found at
 * http://pgsql.tapoueh.org/temp/foo.zip for quick testing.
 */

LOAD ARCHIVE
   -- Swap in the light test archive below for quick local runs.
   -- FROM http://pgsql.tapoueh.org/temp/foo.zip
   FROM http://geolite.maxmind.com/download/geoip/database/GeoLiteCity_CSV/GeoLiteCity-latest.zip
   INTO postgresql:///ip4r

   -- Target schema and the ip4r extension must exist before loading;
   -- geolite.sql is expected to create the target tables.
   BEFORE LOAD
     DO
       $$ create extension if not exists ip4r; $$,
       $$ create schema if not exists geolite; $$

     EXECUTE 'geolite.sql'

   -- Location lookup table: latitude/longitude are folded into a single
   -- PostgreSQL point column via the Common Lisp format expression.
   LOAD CSV
        FROM FILENAME MATCHING ~/GeoLiteCity-Location.csv/
             WITH ENCODING iso-8859-1
             (
                locId,
                country,
                region     [ null if blanks ],
                city       [ null if blanks ],
                postalCode [ null if blanks ],
                latitude,
                longitude,
                metroCode  [ null if blanks ],
                areaCode   [ null if blanks ]
             )
        INTO postgresql:///ip4r?geolite.location
             (
                locid, country, region, city, postalCode,
                location point using (format nil "(~a,~a)" longitude latitude),
                metroCode, areaCode
             )
        -- NOTE(review): skip header = 2 suggests each CSV carries two
        -- preamble lines (copyright + column names) — confirm against data.
        WITH skip header = 2,
             fields optionally enclosed by '"',
             fields escaped by double-quote,
             fields terminated by ','

   -- IP blocks table: the two numeric bounds are combined into one
   -- ip4r range column with the ip-range helper.
   AND LOAD CSV
        FROM FILENAME MATCHING ~/GeoLiteCity-Blocks.csv/
             WITH ENCODING iso-8859-1
             (
                startIpNum, endIpNum, locId
             )
        INTO postgresql:///ip4r?geolite.blocks
             (
                iprange ip4r using (ip-range startIpNum endIpNum),
                locId
             )
        WITH skip header = 2,
             fields optionally enclosed by '"',
             fields escaped by double-quote,
             fields terminated by ','

   -- Build the GiST range index only after the bulk load, which is far
   -- cheaper than maintaining it row by row during the COPY.
   AFTER LOAD DO
     $$ create index blocks_ip4r_idx on geolite.blocks using gist(iprange); $$;