1"""Contains various object definitions needed by the weather utility."""
2
3weather_copyright = """\
4# Copyright (c) 2006-2021 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5# use, copy, modify, and distribute this software is granted under terms
6# provided in the LICENSE file distributed with this software.
7#"""
8
9weather_version = "2.4.1"
10
11radian_to_km = 6372.795484
12radian_to_mi = 3959.871528
13
14def pyversion(ref=None):
15    """Determine the Python version and optionally compare to a reference."""
16    import platform
17    ver = platform.python_version()
18    if ref:
19        return [
20            int(x) for x in ver.split(".")[:2]
21        ] >= [
22            int(x) for x in ref.split(".")[:2]
23        ]
24    else: return ver
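
# Illustrative sketch (not part of the upstream module): pyversion() returns
# the interpreter's version string when called without arguments, and compares
# the running major.minor against a reference string otherwise.
#
#     >>> pyversion()        # e.g. "3.9.2", depending on the interpreter
#     >>> pyversion("3")     # True on any Python 3.x interpreter
#     >>> pyversion("3.8")   # True when running 3.8 or newer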

class Selections:
    """An object to contain selection data."""
    def __init__(self):
        """Store the config, options and arguments."""
        self.config = get_config()
        self.options, self.arguments = get_options(self.config)
        if self.get_bool("cache") and self.get_bool("cache_search") \
            and not self.get_bool("longlist"):
            integrate_search_cache(
                self.config,
                self.get("cachedir"),
                self.get("setpath")
            )
        if not self.arguments:
            if "id" in self.options.__dict__ \
                and self.options.__dict__["id"]:
                self.arguments.append( self.options.__dict__["id"] )
                del( self.options.__dict__["id"] )
                import sys
                message = "WARNING: the --id option is deprecated and will eventually be removed\n"
                sys.stderr.write(message)
            elif "city" in self.options.__dict__ \
                and self.options.__dict__["city"] \
                and "st" in self.options.__dict__ \
                and self.options.__dict__["st"]:
                self.arguments.append(
                    "^%s city, %s" % (
                        self.options.__dict__["city"],
                        self.options.__dict__["st"]
                    )
                )
                del( self.options.__dict__["city"] )
                del( self.options.__dict__["st"] )
                import sys
                message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
                sys.stderr.write(message)
    def get(self, option, argument=None):
        """Retrieve data from the config or options."""
        if argument:
            if self.config.has_section(argument) and (
                self.config.has_option(argument, "city") \
                    or self.config.has_option(argument, "id") \
                    or self.config.has_option(argument, "st")
            ):
                self.config.remove_section(argument)
                import sys
                message = "WARNING: the city/id/st options are now unsupported in aliases\n"
                sys.stderr.write(message)
            if not self.config.has_section(argument):
                guessed = guess(
                    argument,
                    path=self.get("setpath"),
                    info=self.get("info"),
                    cache_search=(
                        self.get("cache") and self.get("cache_search")
                    ),
                    cachedir=self.get("cachedir"),
                    quiet=self.get_bool("quiet")
                )
                self.config.add_section(argument)
                for item in guessed.items():
                    self.config.set(argument, *item)
            if self.config.has_option(argument, option):
                return self.config.get(argument, option)
        if option in self.options.__dict__:
            return self.options.__dict__[option]
        import sys
        message = "WARNING: no URI defined for %s\n" % option
        sys.stderr.write(message)
        return None
    def get_bool(self, option, argument=None):
        """Get data and coerce to a boolean if necessary."""
        # Mimic configparser's getboolean() method by treating
        # false/no/off/0 as False and true/yes/on/1 as True values,
        # case-insensitively
        value = self.get(option, argument)
        if isinstance(value, bool):
            return value
        if isinstance(value, str):
            vlower = value.lower()
            if vlower in ('false', 'no', 'off', '0'):
                return False
            elif vlower in ('true', 'yes', 'on', '1'):
                return True
        raise ValueError("Not a boolean: %s" % value)
    def getint(self, option, argument=None):
        """Get data and coerce to an integer if necessary."""
        value = self.get(option, argument)
        if value: return int(value)
        else: return 0
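
# Illustrative sketch (assumed settings, not part of the upstream module):
# get_bool() mirrors configparser's getboolean() coercion, so string values
# read from a weatherrc behave like real booleans, while values already set
# as booleans by optparse pass through unchanged.
#
#     >>> selections = Selections()
#     >>> selections.get_bool("metric")   # "yes"/"on"/"true"/"1" -> True
#     >>> selections.get_bool("quiet")    # "no"/"off"/"false"/"0" -> False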

def average(coords):
    """Average a list of coordinates."""
    x = 0
    y = 0
    for coord in coords:
        x += coord[0]
        y += coord[1]
    count = len(coords)
    return (x/count, y/count)
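
# Illustrative sketch: average() expects a sequence of (x, y) pairs and
# returns their arithmetic mean as a tuple, e.g. for combining coordinates.
#
#     >>> average([(0.0, 0.0), (2.0, 4.0)])
#     (1.0, 2.0)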

def filter_units(line, units="imperial"):
    """Filter or convert units in a line of text between US/UK and metric."""
    import re
    # filter lines with both pressures in the form of "X inches (Y hPa)" or
    # "X in. Hg (Y hPa)"
    dual_p = re.match(
        r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
        line
    )
    if dual_p:
        preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
        if units == "imperial": line = preamble + in_hg + trailer
        elif units == "metric": line = preamble + hpa + trailer
    # filter lines with both temperatures in the form of "X F (Y C)"
    dual_t = re.match(
        r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
        line
    )
    if dual_t:
        preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
        if units == "imperial": line = preamble + fahrenheit + trailer
        elif units == "metric": line = preamble + celsius + trailer
    # if metric is desired, convert distances in the form of "X mile(s)" to
    # "Y kilometer(s)"
    if units == "metric":
        imperial_d = re.match(
            r"(.* )(\d+)( mile\(s\))(.*)",
            line
        )
        if imperial_d:
            preamble, mi, m_u, trailer = imperial_d.groups()
            line = preamble + str(int(round(int(mi)*1.609344))) \
                + " kilometer(s)" + trailer
    # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
    # desired, convert to "Z KPH"
    imperial_s = re.match(
        r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
        line
    )
    if imperial_s:
        preamble, mph, m_u, kt, trailer = imperial_s.groups()
        if units == "imperial": line = preamble + mph + m_u + trailer
        elif units == "metric":
            line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
                trailer
    # if imperial is desired, qualify given forecast temperatures like "X F";
    # if metric is desired, convert to "Y C"
    imperial_t = re.match(
        r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
        line
    )
    if imperial_t:
        preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
        if units == "imperial":
            line = preamble + parameter + fahrenheit + " F" + sep + trailer
        elif units == "metric":
            line = preamble + parameter \
                + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
                + trailer
    # hand off the resulting line
    return line
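
# Illustrative sketch (hypothetical input line, not from a real METAR feed):
# filter_units() keeps the requested unit from dual-unit phrases and converts
# single-unit distances, speeds and forecast temperatures where needed.
#
#     >>> filter_units("Temperature: 72.0 F (22.2 C)", units="metric")
#     'Temperature: 22.2 C'
#     >>> filter_units("Temperature: 72.0 F (22.2 C)", units="imperial")
#     'Temperature: 72.0 F'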

def get_uri(
    uri,
    ignore_fail=False,
    cache_data=False,
    cacheage=900,
    cachedir="."
):
    """Return a string containing the results of a URI GET."""
    if pyversion("3"):
        import urllib, urllib.error, urllib.request
        URLError = urllib.error.URLError
        urlopen = urllib.request.urlopen
    else:
        import urllib2 as urllib
        URLError = urllib.URLError
        urlopen = urllib.urlopen
    import os, time
    if cache_data:
        dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
        if not os.path.exists(dcachedir):
            try: os.makedirs(dcachedir)
            except (IOError, OSError): pass
        dcache_fn = os.path.join(
            dcachedir,
            uri.split(":",1)[1].replace("/","_")
        )
    now = time.time()
    if cache_data and os.access(dcache_fn, os.R_OK) \
        and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
        dcache_fd = open(dcache_fn)
        data = dcache_fd.read()
        dcache_fd.close()
    else:
        try:
            data = urlopen(uri).read().decode("utf-8")
        except URLError:
            if ignore_fail: return ""
            import os, sys
            sys.stderr.write("%s error: failed to retrieve\n   %s\n\n" % (
                os.path.basename( sys.argv[0] ), uri))
            raise
        # Some data sources are HTML with the plain text wrapped in pre tags
        if "<pre>" in data:
            data = data[data.find("<pre>")+5:data.find("</pre>")]
        if cache_data:
            try:
                import codecs
                dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
                dcache_fd.write(data)
                dcache_fd.close()
            except (IOError, OSError): pass
    return data
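
# Illustrative sketch (station code chosen only for the example): get_uri()
# fetches a document over HTTP(S) and, when cache_data is enabled, keeps a
# copy under <cachedir>/datacache keyed on the URI so repeat lookups within
# cacheage seconds are served from disk instead of the network.
#
#     >>> text = get_uri(
#     ...     "https://tgftp.nws.noaa.gov/data/observations/metar/decoded/KJFK.TXT",
#     ...     cache_data=True, cacheage=900, cachedir="~/.weather")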

def get_metar(
    uri=None,
    verbose=False,
    quiet=False,
    headers=None,
    imperial=False,
    metric=False,
    cache_data=False,
    cacheage=900,
    cachedir="."
):
    """Return a summarized METAR for the specified station."""
    if not uri:
        import os, sys
        message = "%s error: METAR URI required for conditions\n" % \
            os.path.basename( sys.argv[0] )
        sys.stderr.write(message)
        sys.exit(1)
    metar = get_uri(
        uri,
        cache_data=cache_data,
        cacheage=cacheage,
        cachedir=cachedir
    )
    if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
    if verbose: return metar
    else:
        import re
        lines = metar.split("\n")
        if not headers:
            headers = \
                "relative_humidity," \
                + "precipitation_last_hour," \
                + "sky conditions," \
                + "temperature," \
                + "heat index," \
                + "windchill," \
                + "weather," \
                + "wind"
        headerlist = headers.lower().replace("_"," ").split(",")
        output = []
        if not quiet:
            title = "Current conditions at %s"
            place = lines[0].split(", ")
            if len(place) > 1:
                place = "%s, %s" % ( place[0].title(), place[1] )
            else: place = "<UNKNOWN>"
            output.append(title%place)
            output.append("Last updated " + lines[1])
        header_match = False
        for header in headerlist:
            for line in lines:
                if line.lower().startswith(header + ":"):
                    if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
                    if imperial: line = filter_units(line, units="imperial")
                    elif metric: line = filter_units(line, units="metric")
                    if quiet: output.append(line)
                    else: output.append("   " + line)
                    header_match = True
        if not header_match:
            output.append(
                "(no conditions matched your header list, try with --verbose)"
            )
        return "\n".join(output)
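
# Illustrative sketch (station code chosen only for the example): the METAR
# URI normally comes from the stations correlation set, which follows the
# https://tgftp.nws.noaa.gov/data/observations/metar/decoded/<ICAO>.TXT
# pattern used when the sets are built.
#
#     >>> print(get_metar(
#     ...     uri="https://tgftp.nws.noaa.gov/data/observations/metar/decoded/KJFK.TXT",
#     ...     headers="temperature,wind", metric=True))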

def get_alert(
    uri=None,
    verbose=False,
    quiet=False,
    cache_data=False,
    cacheage=900,
    cachedir="."
):
    """Return alert notice for the specified URI."""
    if not uri:
        return ""
    alert = get_uri(
        uri,
        ignore_fail=True,
        cache_data=cache_data,
        cacheage=cacheage,
        cachedir=cachedir
    ).strip()
    if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
    if alert:
        if verbose: return alert
        else:
            if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
                muted = False
            else:
                muted = True
            lines = alert.split("\n")
            import time
            valid_time = time.strftime("%Y%m%d%H%M")
            output = []
            for line in lines:
                if line.startswith("Expires:") \
                    and "Expires:" + valid_time > line:
                    return ""
                if muted and line.startswith("NATIONAL WEATHER SERVICE"):
                    muted = False
                    line = ""
                elif line == "&&":
                    line = ""
                elif line == "$$":
                    muted = True
                if line and not muted:
                    if quiet: output.append(line)
                    else: output.append("   " + line)
            return "\n".join(output)

def get_options(config):
    """Parse the options passed on the command line."""

    # for optparse's builtin -h/--help option
    usage = \
        "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"

    # for optparse's builtin --version option
    verstring = "%prog " + weather_version

    # create the parser
    import optparse
    option_parser = optparse.OptionParser(usage=usage, version=verstring)
    # separate options object from list of arguments and return both

    # the -a/--alert option
    if config.has_option("default", "alert"):
        default_alert = config.getboolean("default", "alert")
    else: default_alert = False
    option_parser.add_option("-a", "--alert",
        dest="alert",
        action="store_true",
        default=default_alert,
        help="include local alert notices")

    # the --atypes option
    if config.has_option("default", "atypes"):
        default_atypes = config.get("default", "atypes")
    else:
        default_atypes = \
            "coastal_flood_statement," \
            + "flash_flood_statement," \
            + "flash_flood_warning," \
            + "flash_flood_watch," \
            + "flood_statement," \
            + "flood_warning," \
            + "severe_thunderstorm_warning," \
            + "severe_weather_statement," \
            + "special_weather_statement," \
            + "urgent_weather_message"
    option_parser.add_option("--atypes",
        dest="atypes",
        default=default_atypes,
        help="list of alert notification types to display")

    # the --build-sets option
    option_parser.add_option("--build-sets",
        dest="build_sets",
        action="store_true",
        default=False,
        help="(re)build location correlation sets")

    # the --cacheage option
    if config.has_option("default", "cacheage"):
        default_cacheage = config.getint("default", "cacheage")
    else: default_cacheage = 900
    option_parser.add_option("--cacheage",
        dest="cacheage",
        default=default_cacheage,
        help="duration in seconds to refresh cached data")

    # the --cachedir option
    if config.has_option("default", "cachedir"):
        default_cachedir = config.get("default", "cachedir")
    else: default_cachedir = "~/.weather"
    option_parser.add_option("--cachedir",
        dest="cachedir",
        default=default_cachedir,
        help="directory for storing cached searches and data")

    # the -f/--forecast option
    if config.has_option("default", "forecast"):
        default_forecast = config.getboolean("default", "forecast")
    else: default_forecast = False
    option_parser.add_option("-f", "--forecast",
        dest="forecast",
        action="store_true",
        default=default_forecast,
        help="include a local forecast")

    # the --headers option
    if config.has_option("default", "headers"):
        default_headers = config.get("default", "headers")
    else:
        default_headers = \
            "temperature," \
            + "relative_humidity," \
            + "wind," \
            + "heat_index," \
            + "windchill," \
            + "weather," \
            + "sky_conditions," \
            + "precipitation_last_hour"
    option_parser.add_option("--headers",
        dest="headers",
        default=default_headers,
        help="list of conditions headers to display")

    # the --imperial option
    if config.has_option("default", "imperial"):
        default_imperial = config.getboolean("default", "imperial")
    else: default_imperial = False
    option_parser.add_option("--imperial",
        dest="imperial",
        action="store_true",
        default=default_imperial,
        help="filter/convert conditions for US/UK units")

    # the --info option
    option_parser.add_option("--info",
        dest="info",
        action="store_true",
        default=False,
        help="output detailed information for your search")

    # the -l/--list option
    option_parser.add_option("-l", "--list",
        dest="list",
        action="store_true",
        default=False,
        help="list all configured aliases and cached searches")

    # the --longlist option
    option_parser.add_option("--longlist",
        dest="longlist",
        action="store_true",
        default=False,
        help="display details of all configured aliases")

    # the -m/--metric option
    if config.has_option("default", "metric"):
        default_metric = config.getboolean("default", "metric")
    else: default_metric = False
    option_parser.add_option("-m", "--metric",
        dest="metric",
        action="store_true",
        default=default_metric,
        help="filter/convert conditions for metric units")

    # the -n/--no-conditions option
    if config.has_option("default", "conditions"):
        default_conditions = config.getboolean("default", "conditions")
    else: default_conditions = True
    option_parser.add_option("-n", "--no-conditions",
        dest="conditions",
        action="store_false",
        default=default_conditions,
        help="disable output of current conditions")

    # the --no-cache option
    if config.has_option("default", "cache"):
        default_cache = config.getboolean("default", "cache")
    else: default_cache = True
    option_parser.add_option("--no-cache",
        dest="cache",
        action="store_false",
        default=default_cache,
        help="disable all caching (searches and data)")

    # the --no-cache-data option
    if config.has_option("default", "cache_data"):
        default_cache_data = config.getboolean("default", "cache_data")
    else: default_cache_data = True
    option_parser.add_option("--no-cache-data",
        dest="cache_data",
        action="store_false",
        default=default_cache_data,
        help="disable retrieved data caching")

    # the --no-cache-search option
    if config.has_option("default", "cache_search"):
        default_cache_search = config.getboolean("default", "cache_search")
    else: default_cache_search = True
    option_parser.add_option("--no-cache-search",
        dest="cache_search",
        action="store_false",
        default=default_cache_search,
        help="disable search result caching")

    # the -q/--quiet option
    if config.has_option("default", "quiet"):
        default_quiet = config.getboolean("default", "quiet")
    else: default_quiet = False
    option_parser.add_option("-q", "--quiet",
        dest="quiet",
        action="store_true",
        default=default_quiet,
        help="skip preambles and don't indent")

    # the --setpath option
    if config.has_option("default", "setpath"):
        default_setpath = config.get("default", "setpath")
    else: default_setpath = ".:~/.weather"
    option_parser.add_option("--setpath",
        dest="setpath",
        default=default_setpath,
        help="directory search path for correlation sets")

    # the -v/--verbose option
    if config.has_option("default", "verbose"):
        default_verbose = config.getboolean("default", "verbose")
    else: default_verbose = False
    option_parser.add_option("-v", "--verbose",
        dest="verbose",
        action="store_true",
        default=default_verbose,
        help="show full decoded feeds")

    # deprecated options
    if config.has_option("default", "city"):
        default_city = config.get("default", "city")
    else: default_city = ""
    option_parser.add_option("-c", "--city",
        dest="city",
        default=default_city,
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "id"):
        default_id = config.get("default", "id")
    else: default_id = ""
    option_parser.add_option("-i", "--id",
        dest="id",
        default=default_id,
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "st"):
        default_st = config.get("default", "st")
    else: default_st = ""
    option_parser.add_option("-s", "--st",
        dest="st",
        default=default_st,
        help=optparse.SUPPRESS_HELP)

    options, arguments = option_parser.parse_args()
    return options, arguments

def get_config():
    """Parse the aliases and configuration."""
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    config = configparser.ConfigParser()
    import os
    rcfiles = [
        "/etc/weatherrc",
        "/etc/weather/weatherrc",
        os.path.expanduser("~/.weather/weatherrc"),
        os.path.expanduser("~/.weatherrc"),
        "weatherrc"
        ]
    for rcfile in rcfiles:
        if os.access(rcfile, os.R_OK): config.read(rcfile)
    for section in config.sections():
        if section != section.lower():
            if config.has_section(section.lower()):
                config.remove_section(section.lower())
            config.add_section(section.lower())
            for option,value in config.items(section):
                config.set(section.lower(), option, value)
    return config
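
# Illustrative weatherrc sketch (alias name and URI are hypothetical): each
# section defines an alias whose options, typically a description plus one or
# more URIs such as metar, are what Selections.get() returns for that alias.
#
#     [home]
#     description = Example alias
#     metar = https://tgftp.nws.noaa.gov/data/observations/metar/decoded/KJFK.TXT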

def integrate_search_cache(config, cachedir, setpath):
    """Add cached search results into the configuration."""
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    import os, time
    scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
    if not os.access(scache_fn, os.R_OK): return config
    scache_fd = open(scache_fn)
    created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
    scache_fd.close()
    now = time.time()
    datafiles = data_index(setpath)
    if datafiles:
        data_freshness = sorted(
            [ x[1] for x in datafiles.values() ],
            reverse=True
        )[0]
    else: data_freshness = now
    if created < data_freshness <= now:
        try:
            os.remove(scache_fn)
            print( "[clearing outdated %s]" % scache_fn )
        except (IOError, OSError):
            pass
        return config
    scache = configparser.ConfigParser()
    scache.read(scache_fn)
    for section in scache.sections():
        if not config.has_section(section):
            config.add_section(section)
            for option,value in scache.items(section):
                config.set(section, option, value)
    return config

def list_aliases(config, detail=False):
    """Return a formatted list of aliases defined in the config."""
    if detail:
        output = "\n# configured alias details..."
        for section in sorted(config.sections()):
            output += "\n\n[%s]" % section
            for item in sorted(config.items(section)):
                output += "\n%s = %s" % item
        output += "\n"
    else:
        output = "configured aliases and cached searches..."
        for section in sorted(config.sections()):
            if config.has_option(section, "description"):
                description = config.get(section, "description")
            else: description = "(no description provided)"
            output += "\n   %s: %s" % (section, description)
    return output

def data_index(path):
    """Locate correlation data files in the colon-separated search path."""
    import os
    datafiles = {}
    for filename in ("airports", "places", "stations", "zctas", "zones"):
        for dirname in path.split(":"):
            for extension in ("", ".gz", ".txt"):
                candidate = os.path.expanduser(
                    os.path.join( dirname, "".join( (filename, extension) ) )
                )
                if os.path.exists(candidate):
                    datafiles[filename] = (
                        candidate,
                        os.stat(candidate).st_mtime
                    )
                    break
            if filename in datafiles:
                break
    return datafiles
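
# Illustrative sketch: data_index() maps each correlation set name to a
# (filename, mtime) tuple for the first match found along the search path,
# e.g. (values are hypothetical):
#
#     {"stations": ("/home/user/.weather/stations.gz", 1617812345.0), ...}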

def guess(
    expression,
    path=".",
    max_results=20,
    info=False,
    cache_search=False,
    cacheage=900,
    cachedir=".",
    quiet=False
):
    """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
    import codecs, datetime, time, os, re, sys
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    datafiles = data_index(path)
    if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
    elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
    elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
    elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
    elif re.match(
        r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
        expression
    ):
        searchtype = "coordinates"
    elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
    else:
        searchtype = "name"
        cache_search = False
    if cache_search: action = "caching"
    else: action = "using"
    if info:
        scores = [
            (0.005, "bad"),
            (0.025, "poor"),
            (0.160, "suspect"),
            (0.500, "mediocre"),
            (0.840, "good"),
            (0.975, "great"),
            (0.995, "excellent"),
            (1.000, "ideal"),
        ]
    if not quiet: print("Searching via %s..."%searchtype)
    stations = configparser.ConfigParser()
    dataname = "stations"
    if dataname in datafiles:
        datafile = datafiles[dataname][0]
        if datafile.endswith(".gz"):
            import gzip
            if pyversion("3"):
                stations.read_string(
                    gzip.open(datafile).read().decode("utf-8") )
            else: stations.readfp( gzip.open(datafile) )
        else:
            stations.read(datafile)
    else:
        message = "%s error: can't find \"%s\" data file\n" % (
            os.path.basename( sys.argv[0] ),
            dataname
        )
        sys.stderr.write(message)
        exit(1)
    zones = configparser.ConfigParser()
    dataname = "zones"
    if dataname in datafiles:
        datafile = datafiles[dataname][0]
        if datafile.endswith(".gz"):
            import gzip
            if pyversion("3"):
                zones.read_string( gzip.open(datafile).read().decode("utf-8") )
            else: zones.readfp( gzip.open(datafile) )
        else:
            zones.read(datafile)
    else:
        message = "%s error: can't find \"%s\" data file\n" % (
            os.path.basename( sys.argv[0] ),
            dataname
        )
        sys.stderr.write(message)
        exit(1)
    search = None
    station = ("", 0)
    zone = ("", 0)
    dataset = None
    possibilities = []
    uris = {}
    if searchtype == "airport":
        expression = expression.lower()
        airports = configparser.ConfigParser()
        dataname = "airports"
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                import gzip
                if pyversion("3"):
                    airports.read_string(
                        gzip.open(datafile).read().decode("utf-8") )
                else: airports.readfp( gzip.open(datafile) )
            else:
                airports.read(datafile)
        else:
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
                dataname
            )
            sys.stderr.write(message)
            exit(1)
        if airports.has_section(expression) \
            and airports.has_option(expression, "station"):
            search = (expression, "IATA/FAA airport code %s" % expression)
            station = ( airports.get(expression, "station"), 0 )
            if stations.has_option(station[0], "zone"):
                zone = eval( stations.get(station[0], "zone") )
                dataset = stations
            if not ( info or quiet ) \
                and stations.has_option( station[0], "description" ):
                print(
                    "[%s result %s]" % (
                        action,
                        stations.get(station[0], "description")
                    )
                )
        else:
            message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
                expression,
                datafiles["airports"][0]
            )
            sys.stderr.write(message)
            exit(1)
    elif searchtype == "station":
        expression = expression.lower()
        if stations.has_section(expression):
            station = (expression, 0)
            if not search:
                search = (expression, "ICAO station code %s" % expression)
            if stations.has_option(expression, "zone"):
                zone = eval( stations.get(expression, "zone") )
                dataset = stations
            if not ( info or quiet ) \
                and stations.has_option(expression, "description"):
                print(
                    "[%s result %s]" % (
                        action,
                        stations.get(expression, "description")
                    )
                )
        else:
            message = "No ICAO weather station \"%s\" in the %s file.\n" % (
                expression,
                datafiles["stations"][0]
            )
            sys.stderr.write(message)
            exit(1)
    elif searchtype == "zone":
        expression = expression.lower()
        if zones.has_section(expression) \
            and zones.has_option(expression, "station"):
            zone = (expression, 0)
            station = eval( zones.get(expression, "station") )
            dataset = zones
            search = (expression, "NWS/NOAA weather zone %s" % expression)
            if not ( info or quiet ) \
                and zones.has_option(expression, "description"):
                print(
                    "[%s result %s]" % (
                        action,
                        zones.get(expression, "description")
                    )
                )
        else:
            message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
                expression,
                datafiles["zones"][0]
            )
            sys.stderr.write(message)
            exit(1)
    elif searchtype == "ZCTA":
        zctas = configparser.ConfigParser()
        dataname = "zctas"
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                import gzip
                if pyversion("3"):
                    zctas.read_string(
                        gzip.open(datafile).read().decode("utf-8") )
                else: zctas.readfp( gzip.open(datafile) )
            else:
                zctas.read(datafile)
        else:
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
                dataname
            )
            sys.stderr.write(message)
            exit(1)
        dataset = zctas
        if zctas.has_section(expression) \
            and zctas.has_option(expression, "station"):
            station = eval( zctas.get(expression, "station") )
            search = (expression, "Census ZCTA (ZIP code) %s" % expression)
            if zctas.has_option(expression, "zone"):
                zone = eval( zctas.get(expression, "zone") )
        else:
            message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
                expression,
                datafiles["zctas"][0]
            )
            sys.stderr.write(message)
            exit(1)
    elif searchtype == "coordinates":
        search = (expression, "Geographic coordinates %s" % expression)
        stationtable = {}
        for station in stations.sections():
            if stations.has_option(station, "location"):
                stationtable[station] = {
                    "location": eval( stations.get(station, "location") )
                }
        station = closest( gecos(expression), stationtable, "location", 0.1 )
        if not station[0]:
            message = "No ICAO weather station found near %s.\n" % expression
            sys.stderr.write(message)
            exit(1)
        zonetable = {}
        for zone in zones.sections():
            if zones.has_option(zone, "centroid"):
                zonetable[zone] = {
                    "centroid": eval( zones.get(zone, "centroid") )
                }
        zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
        if not zone[0]:
            message = "No NWS weather zone near %s; forecasts unavailable.\n" \
                % expression
            sys.stderr.write(message)
    elif searchtype in ("FIPS", "name"):
        places = configparser.ConfigParser()
        dataname = "places"
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                import gzip
                if pyversion("3"):
                    places.read_string(
                        gzip.open(datafile).read().decode("utf-8") )
                else: places.readfp( gzip.open(datafile) )
            else:
                places.read(datafile)
        else:
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
                dataname
            )
            sys.stderr.write(message)
            exit(1)
        dataset = places
        place = expression.lower()
        if places.has_section(place) and places.has_option(place, "station"):
            station = eval( places.get(place, "station") )
            search = (expression, "Census Place %s" % expression)
            if places.has_option(place, "description"):
                search = (
                    search[0],
                    search[1] + ", %s" % places.get(place, "description")
                )
            if places.has_option(place, "zone"):
                zone = eval( places.get(place, "zone") )
            if not ( info or quiet ) \
                and places.has_option(place, "description"):
                print(
                    "[%s result %s]" % (
                        action,
                        places.get(place, "description")
                    )
                )
        else:
            for place in places.sections():
                if places.has_option(place, "description") \
                    and places.has_option(place, "station") \
                    and re.search(
                        expression,
                        places.get(place, "description"),
                        re.I
                    ):
                        possibilities.append(place)
            for place in stations.sections():
                if stations.has_option(place, "description") \
                    and re.search(
                        expression,
                        stations.get(place, "description"),
                        re.I
                    ):
                        possibilities.append(place)
            for place in zones.sections():
                if zones.has_option(place, "description") \
                    and zones.has_option(place, "station") \
                    and re.search(
                        expression,
                        zones.get(place, "description"),
                        re.I
                    ):
                        possibilities.append(place)
            if len(possibilities) == 1:
                place = possibilities[0]
                if places.has_section(place):
                    station = eval( places.get(place, "station") )
                    description = places.get(place, "description")
                    if places.has_option(place, "zone"):
                        zone = eval( places.get(place, "zone" ) )
                    search = ( expression, "%s: %s" % (place, description) )
                elif stations.has_section(place):
                    station = (place, 0.0)
                    description = stations.get(place, "description")
                    if stations.has_option(place, "zone"):
                        zone = eval( stations.get(place, "zone" ) )
                    search = ( expression, "ICAO station code %s" % place )
                elif zones.has_section(place):
                    station = eval( zones.get(place, "station") )
                    description = zones.get(place, "description")
                    zone = (place, 0.0)
                    search = ( expression, "NWS/NOAA weather zone %s" % place )
                if not ( info or quiet ):
                    print( "[%s result %s]" % (action, description) )
            if not possibilities and not station[0]:
                message = "No FIPS code/census area match in the %s file.\n" % (
                    datafiles["places"][0]
                )
                sys.stderr.write(message)
                exit(1)
    if station[0]:
        uris["metar"] = stations.get( station[0], "metar" )
        if zone[0]:
            for key,value in zones.items( zone[0] ):
                if key not in ("centroid", "description", "station"):
                    uris[key] = value
    elif possibilities:
        count = len(possibilities)
        if count <= max_results:
            print( "Your search is ambiguous, returning %s matches:" % count )
            for place in sorted(possibilities):
                if places.has_section(place):
                    print(
                        "   [%s] %s" % (
                            place,
                            places.get(place, "description")
                        )
                    )
                elif stations.has_section(place):
                    print(
                        "   [%s] %s" % (
                            place,
                            stations.get(place, "description")
                        )
                    )
                elif zones.has_section(place):
                    print(
                        "   [%s] %s" % (
                            place,
                            zones.get(place, "description")
                        )
                    )
        else:
            print(
                "Your search is too ambiguous, returning %s matches." % count
            )
        exit(0)
    if info:
        stationlist = []
        zonelist = []
        if dataset:
            for section in dataset.sections():
                if dataset.has_option(section, "station"):
                    stationlist.append(
                        eval( dataset.get(section, "station") )[1]
                    )
                if dataset.has_option(section, "zone"):
                    zonelist.append( eval( dataset.get(section, "zone") )[1] )
        stationlist.sort()
        zonelist.sort()
        scount = len(stationlist)
        zcount = len(zonelist)
        sranks = []
        zranks = []
        for score in scores:
            if stationlist:
                sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
            if zonelist:
                zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
        description = search[1]
        uris["description"] = description
        print(
            "%s\n%s" % ( description, "-" * len(description) )
        )
        print(
            "%s: %s" % (
                station[0],
                stations.get( station[0], "description" )
            )
        )
        km = radian_to_km*station[1]
        mi = radian_to_mi*station[1]
        if sranks and not description.startswith("ICAO station code "):
            for index in range(0, len(scores)):
                if station[1] >= sranks[index]:
                    score = scores[index][1]
                    break
            print(
                "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
            )
        elif searchtype == "coordinates":
            print( "   (%.3gkm, %.3gmi)" % (km, mi) )
        if zone[0]:
            print(
                "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
            )
        km = radian_to_km*zone[1]
        mi = radian_to_mi*zone[1]
        if zranks and not description.startswith("NWS/NOAA weather zone "):
            for index in range(0, len(scores)):
                if zone[1] >= zranks[index]:
                    score = scores[index][1]
                    break
            print(
                "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
            )
        elif searchtype == "coordinates" and zone[0]:
            print( "   (%.3gkm, %.3gmi)" % (km, mi) )
    if cache_search:
        now = time.time()
        nowstamp = "%s (%s)" % (
            now,
            datetime.datetime.isoformat(
                datetime.datetime.fromtimestamp(now),
                " "
            )
        )
        search_cache = ["\n"]
        search_cache.append( "[%s]\n" % search[0] )
        search_cache.append( "cached = %s\n" % nowstamp )
        for uriname in sorted(uris.keys()):
            search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
        real_cachedir = os.path.expanduser(cachedir)
        if not os.path.exists(real_cachedir):
            try: os.makedirs(real_cachedir)
            except (IOError, OSError): pass
        scache_fn = os.path.join(real_cachedir, "searches")
        if not os.path.exists(scache_fn):
            then = sorted(
                    [ x[1] for x in datafiles.values() ],
                    reverse=True
                )[0]
            thenstamp = "%s (%s)" % (
                then,
                datetime.datetime.isoformat(
                    datetime.datetime.fromtimestamp(then),
                    " "
                )
            )
            search_cache.insert(
                0,
                "# based on data files from: %s\n" % thenstamp
            )
        try:
            scache_existing = configparser.ConfigParser()
            scache_existing.read(scache_fn)
            if not scache_existing.has_section(search[0]):
                scache_fd = codecs.open(scache_fn, "a", "utf-8")
                scache_fd.writelines(search_cache)
                scache_fd.close()
        except (IOError, OSError): pass
    if not info:
        return uris

def closest(position, nodes, fieldname, angle=None):
    """Find the node nearest the given position, within an optional angle."""
    import math
    if not angle: angle = 2*math.pi
    match = None
    for name in nodes:
        if fieldname in nodes[name]:
            node = nodes[name][fieldname]
            if node and abs( position[0]-node[0] ) < angle:
                if abs( position[1]-node[1] ) < angle \
                    or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
                    if position == node:
                        angle = 0
                        match = name
                    else:
                        candidate = math.acos(
                            math.sin( position[0] ) * math.sin( node[0] ) \
                                + math.cos( position[0] ) \
                                * math.cos( node[0] ) \
                                * math.cos( position[1] - node[1] )
                            )
                        if candidate < angle:
                            angle = candidate
                            match = name
    if match: match = str(match)
    return (match, angle)
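
# Illustrative sketch (coordinates are hypothetical): closest() takes a
# position in radians, a table of nodes keyed by name, the field holding each
# node's coordinates, and an optional cutoff angle; it returns the best match
# and its angular distance, which guess() scales by radian_to_km/radian_to_mi.
#
#     >>> nodes = {"kjfk": {"location": gecos("40.64, -73.78")}}
#     >>> closest(gecos("40.7, -74.0"), nodes, "location", 0.1)
#     ('kjfk', ...)          # second element is the angle in radians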

def gecos(formatted):
    """Convert a latitude,longitude string into a tuple of radians."""
    import math, re
    coordinates = formatted.split(",")
    for coordinate in range(0, 2):
        degrees, foo, minutes, bar, seconds, hemisphere = re.match(
            r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
            coordinates[coordinate].strip().lower()
        ).groups()
        value = float(degrees)
        if minutes: value += float(minutes)/60
        if seconds: value += float(seconds)/3600
        if hemisphere and hemisphere in "sw": value *= -1
        coordinates[coordinate] = math.radians(value)
    return tuple(coordinates)
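
# Illustrative sketch: gecos() accepts decimal or degrees-minutes-seconds
# coordinate pairs (with optional n/s/e/w suffixes) and returns the pair in
# radians, negating southern and western values; the two calls below are
# equivalent.
#
#     >>> gecos("40.75, -73.99")
#     >>> gecos("40-45-0n, 73-59-24w")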

def correlate():
    """Rebuild the correlation data sets from their source files."""
    import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    for filename in os.listdir("."):
        if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
            gcounties_an = filename
            gcounties_fn = filename[:-4] + ".txt"
        elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
            gcousubs_an = filename
            gcousubs_fn = filename[:-4] + ".txt"
        elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
            gplace_an = filename
            gplace_fn = filename[:-4] + ".txt"
        elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
            gzcta_an = filename
            gzcta_fn = filename[:-4] + ".txt"
        elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
            cpfzcf_fn = filename
    nsdcccc_fn = "nsd_cccc.txt"
    ourairports_fn = "airports.csv"
    overrides_fn = "overrides.conf"
    overrideslog_fn = "overrides.log"
    slist_fn = "slist"
    zlist_fn = "zlist"
    qalog_fn = "qa.log"
    airports_fn = "airports"
    places_fn = "places"
    stations_fn = "stations"
    zctas_fn = "zctas"
    zones_fn = "zones"
    header = """\
%s
# generated by %s on %s from these public domain sources:
#
# https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
# %s %s %s
# %s %s %s
# %s %s %s
# %s %s %s
#
# https://www.weather.gov/gis/ZoneCounty/
# %s %s %s
#
# https://tgftp.nws.noaa.gov/data/
# %s %s %s
#
# https://ourairports.com/data/
# %s %s %s
#
# ...and these manually-generated or hand-compiled adjustments:
# %s %s %s
# %s %s %s
# %s %s %s\
""" % (
        weather_copyright,
        os.path.basename( sys.argv[0] ),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
        ),
        hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
        ),
        gcounties_an,
        hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
        ),
        gcousubs_an,
        hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
        ),
        gplace_an,
        hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
        ),
        gzcta_an,
        hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
        ),
        cpfzcf_fn,
        hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
        ),
        nsdcccc_fn,
        hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
        ),
        ourairports_fn,
        hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
        ),
        overrides_fn,
        hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
        ),
        slist_fn,
        hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
        datetime.date.isoformat(
            datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
        ),
        zlist_fn
    )
    airports = {}
    places = {}
    stations = {}
    zctas = {}
    zones = {}
    message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
    columns = gcounties.readline().decode("utf-8").strip().split("\t")
    for line in gcounties:
        fields = line.decode("utf-8").strip().split("\t")
        f_geoid = fields[ columns.index("GEOID") ].strip()
        f_name = fields[ columns.index("NAME") ].strip()
        f_usps = fields[ columns.index("USPS") ].strip()
        f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
        f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
        if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
            fips = "fips%s" % f_geoid
            if fips not in places: places[fips] = {}
            places[fips]["centroid"] = gecos(
                "%s,%s" % (f_intptlat, f_intptlong)
            )
            places[fips]["description"] = "%s, %s" % (f_name, f_usps)
            count += 1
    gcounties.close()
    print("done (%s lines)." % count)
    message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
    columns = gcousubs.readline().decode("utf-8").strip().split("\t")
    for line in gcousubs:
        fields = line.decode("utf-8").strip().split("\t")
        f_geoid = fields[ columns.index("GEOID") ].strip()
        f_name = fields[ columns.index("NAME") ].strip()
        f_usps = fields[ columns.index("USPS") ].strip()
        f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
        f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
        if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
            fips = "fips%s" % f_geoid
            if fips not in places: places[fips] = {}
            places[fips]["centroid"] = gecos(
                "%s,%s" % (f_intptlat, f_intptlong)
            )
            places[fips]["description"] = "%s, %s" % (f_name, f_usps)
            count += 1
    gcousubs.close()
    print("done (%s lines)." % count)
    message = "Reading %s:%s..." % (gplace_an, gplace_fn)
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
    columns = gplace.readline().decode("utf-8").strip().split("\t")
    for line in gplace:
        fields = line.decode("utf-8").strip().split("\t")
        f_geoid = fields[ columns.index("GEOID") ].strip()
        f_name = fields[ columns.index("NAME") ].strip()
        f_usps = fields[ columns.index("USPS") ].strip()
        f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
        f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
        if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
            fips = "fips%s" % f_geoid
            if fips not in places: places[fips] = {}
            places[fips]["centroid"] = gecos(
                "%s,%s" % (f_intptlat, f_intptlong)
            )
            places[fips]["description"] = "%s, %s" % (f_name, f_usps)
            count += 1
    gplace.close()
    print("done (%s lines)." % count)
    message = "Reading %s..." % slist_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    slist = codecs.open(slist_fn, "r", "utf-8")
    for line in slist:
        icao = line.split("#")[0].strip()
        if icao:
            stations[icao] = {
                "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
                    + "metar/decoded/%s.TXT" % icao.upper()
            }
            count += 1
    slist.close()
    print("done (%s lines)." % count)
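    # Enrich known stations from the semicolon-delimited station directory:
    # assemble a "name, state, country" description and parse a lat/lon
    # pair into a location, falling back to an earlier pair of fields when
    # the primary coordinate columns are empty.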
    message = "Reading %s..." % nsdcccc_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    nsdcccc = codecs.open(nsdcccc_fn, "r", "utf-8")
    for line in nsdcccc:
        line = str(line)
        fields = line.split(";")
        icao = fields[0].strip().lower()
        if icao in stations:
            description = []
            name = " ".join( fields[3].strip().title().split() )
            if name: description.append(name)
            st = fields[4].strip()
            if st: description.append(st)
            country = " ".join( fields[5].strip().title().split() )
            if country: description.append(country)
            if description:
                stations[icao]["description"] = ", ".join(description)
            lat, lon = fields[7:9]
            if lat and lon:
                stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
            elif "location" not in stations[icao]:
                lat, lon = fields[5:7]
                if lat and lon:
                    stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
        count += 1
    nsdcccc.close()
    print("done (%s lines)." % count)
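    # Cross-reference the OurAirports CSV: map three-letter IATA codes to
    # known ICAO stations and fill in any still-missing station
    # descriptions and locations.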
    message = "Reading %s..." % ourairports_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    ourairports = open(ourairports_fn, "r", encoding="utf-8", newline="")
    for row in csv.reader(ourairports):
        icao = row[12].lower()
        if icao in stations:
            iata = row[13].lower()
            if len(iata) == 3: airports[iata] = { "station": icao }
            if "description" not in stations[icao]:
                description = []
                name = row[3]
                if name: description.append(name)
                municipality = row[10]
                if municipality: description.append(municipality)
                region = row[9]
                country = row[8]
                if region:
                    if "-" in region:
                        c,r = region.split("-", 1)
                        if c == country: region = r
                    description.append(region)
                if country:
                    description.append(country)
                if description:
                    stations[icao]["description"] = ", ".join(description)
            if "location" not in stations[icao]:
                lat = row[4]
                if lat:
                    lon = row[5]
                    if lon:
                        stations[icao]["location"] = gecos(
                            "%s,%s" % (lat, lon)
                        )
        count += 1
    ourairports.close()
    print("done (%s lines)." % count)
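    # Register every forecast zone listed in the zone list (ignoring "#"
    # comments); details are filled in from the correlation file below.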
    message = "Reading %s..." % zlist_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    zlist = codecs.open(zlist_fn, "r", "utf-8")
    for line in zlist:
        line = line.split("#")[0].strip()
        if line:
            zones[line] = {}
            count += 1
    zlist.close()
    print("done (%s lines)." % count)
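    # Parse the pipe-delimited zone/county correlation data: derive each
    # zone's tgftp statement, warning and forecast URLs, a human-readable
    # description, and a centroid (from the row itself or, failing that,
    # from the matching FIPS place).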
    message = "Reading %s..." % cpfzcf_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    cpfz = {}
    cpfzcf = codecs.open(cpfzcf_fn, "r", "utf-8")
    for line in cpfzcf:
        fields = line.strip().split("|")
        if len(fields) == 11 \
            and fields[0] and fields[1] and fields[9] and fields[10]:
            zone = "z".join( fields[:2] ).lower()
            if zone in zones:
                state = fields[0]
                if state:
                    zones[zone]["coastal_flood_statement"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "flood/coastal/%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["flash_flood_statement"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "flash_flood/statement/%s/%s.txt"
                        % (state.lower(), zone))
                    zones[zone]["flash_flood_warning"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "flash_flood/warning/%s/%s.txt"
                        % (state.lower(), zone))
                    zones[zone]["flash_flood_watch"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["flood_statement"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "flood/statement/%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["flood_warning"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "flood/warning/%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["severe_thunderstorm_warning"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "thunderstorm/%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["severe_weather_statement"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "severe_weather_stmt/%s/%s.txt"
                        % (state.lower(), zone))
                    zones[zone]["short_term_forecast"] = (
                        "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
                        "%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["special_weather_statement"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "special_weather_stmt/%s/%s.txt"
                        % (state.lower(), zone))
                    zones[zone]["state_forecast"] = (
                        "https://tgftp.nws.noaa.gov/data/forecasts/state/"
                        "%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["urgent_weather_message"] = (
                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                        "non_precip/%s/%s.txt" % (state.lower(), zone))
                    zones[zone]["zone_forecast"] = (
                        "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
                        "%s/%s.txt" % (state.lower(), zone))
                description = fields[3].strip()
                fips = "fips%s"%fields[6]
                county = fields[5]
                if county:
                    if description.endswith(county):
                        description += " County"
                    else:
                        description += ", %s County" % county
                description += ", %s, US" % state
                zones[zone]["description"] = description
                zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
                if fips in places and not zones[zone]["centroid"]:
                    zones[zone]["centroid"] = places[fips]["centroid"]
        count += 1
    cpfzcf.close()
    print("done (%s lines)." % count)
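    # Record a centroid for every ZCTA row that has a GEOID and internal
    # point coordinates.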
    message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
    columns = gzcta.readline().decode("utf-8").strip().split("\t")
    for line in gzcta:
        fields = line.decode("utf-8").strip().split("\t")
        f_geoid = fields[ columns.index("GEOID") ].strip()
        f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
        f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
        if f_geoid and f_intptlat and f_intptlong:
            if f_geoid not in zctas: zctas[f_geoid] = {}
            zctas[f_geoid]["centroid"] = gecos(
                "%s,%s" % (f_intptlat, f_intptlong)
            )
            count += 1
    gzcta.close()
    print("done (%s lines)." % count)
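    # Apply local overrides: each INI section names an airport, station,
    # ZCTA, zone or place (a leading "-" deletes it); centroid/location
    # values are evaluated as Python expressions, everything else is copied
    # verbatim, and each action is logged.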
    message = "Reading %s..." % overrides_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    added = 0
    removed = 0
    changed = 0
    overrides = configparser.ConfigParser()
    overrides.read_file( codecs.open(overrides_fn, "r", "utf8") )
    overrideslog = []
    for section in overrides.sections():
        logact = ""
        addopt = 0
        chgopt = 0
        if section.startswith("-"):
            section = section[1:]
            delete = True
        else: delete = False
        if re.match("[A-Za-z]{3}$", section):
            if delete:
                if section in airports:
                    del( airports[section] )
                    logact = "removed airport %s" % section
                    removed += 1
                else:
                    logact = "tried to remove nonexistent airport %s" % section
            else:
                if section in airports:
                    logact = "changed airport %s" % section
                    changed += 1
                else:
                    airports[section] = {}
                    logact = "added airport %s" % section
                    added += 1
                for key,value in overrides.items(section):
                    if key in airports[section]: chgopt += 1
                    else: addopt += 1
                    if key in ("centroid", "location"):
                        airports[section][key] = eval(value)
                    else:
                        airports[section][key] = value
                if addopt and chgopt:
                    logact += " (+%s/!%s options)" % (addopt, chgopt)
                elif addopt: logact += " (+%s options)" % addopt
                elif chgopt: logact += " (!%s options)" % chgopt
        elif re.match("[A-Za-z0-9]{4}$", section):
            if delete:
                if section in stations:
                    del( stations[section] )
                    logact = "removed station %s" % section
                    removed += 1
                else:
                    logact = "tried to remove nonexistent station %s" % section
            else:
                if section in stations:
                    logact = "changed station %s" % section
                    changed += 1
                else:
                    stations[section] = {}
                    logact = "added station %s" % section
                    added += 1
                for key,value in overrides.items(section):
                    if key in stations[section]: chgopt += 1
                    else: addopt += 1
                    if key in ("centroid", "location"):
                        stations[section][key] = eval(value)
                    else:
                        stations[section][key] = value
                if addopt and chgopt:
                    logact += " (+%s/!%s options)" % (addopt, chgopt)
                elif addopt: logact += " (+%s options)" % addopt
                elif chgopt: logact += " (!%s options)" % chgopt
        elif re.match("[0-9]{5}$", section):
            if delete:
                if section in zctas:
                    del( zctas[section] )
                    logact = "removed zcta %s" % section
                    removed += 1
                else:
                    logact = "tried to remove nonexistent zcta %s" % section
            else:
                if section in zctas:
                    logact = "changed zcta %s" % section
                    changed += 1
                else:
                    zctas[section] = {}
                    logact = "added zcta %s" % section
                    added += 1
                for key,value in overrides.items(section):
                    if key in zctas[section]: chgopt += 1
                    else: addopt += 1
                    if key in ("centroid", "location"):
                        zctas[section][key] = eval(value)
                    else:
                        zctas[section][key] = value
                if addopt and chgopt:
                    logact += " (+%s/!%s options)" % (addopt, chgopt)
                elif addopt: logact += " (+%s options)" % addopt
                elif chgopt: logact += " (!%s options)" % chgopt
        elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
            if delete:
                if section in zones:
                    del( zones[section] )
                    logact = "removed zone %s" % section
                    removed += 1
                else:
                    logact = "tried to remove nonexistent zone %s" % section
            else:
                if section in zones:
                    logact = "changed zone %s" % section
                    changed += 1
                else:
                    zones[section] = {}
                    logact = "added zone %s" % section
                    added += 1
                for key,value in overrides.items(section):
                    if key in zones[section]: chgopt += 1
                    else: addopt += 1
                    if key in ("centroid", "location"):
                        zones[section][key] = eval(value)
                    else:
                        zones[section][key] = value
                if addopt and chgopt:
                    logact += " (+%s/!%s options)" % (addopt, chgopt)
                elif addopt: logact += " (+%s options)" % addopt
                elif chgopt: logact += " (!%s options)" % chgopt
        elif re.match("fips[0-9]+$", section):
            if delete:
                if section in places:
                    del( places[section] )
                    logact = "removed place %s" % section
                    removed += 1
                else:
                    logact = "tried to remove nonexistent place %s" % section
            else:
                if section in places:
                    logact = "changed place %s" % section
                    changed += 1
                else:
                    places[section] = {}
                    logact = "added place %s" % section
                    added += 1
                for key,value in overrides.items(section):
                    if key in places[section]: chgopt += 1
                    else: addopt += 1
                    if key in ("centroid", "location"):
                        places[section][key] = eval(value)
                    else:
                        places[section][key] = value
                if addopt and chgopt:
                    logact += " (+%s/!%s options)" % (addopt, chgopt)
                elif addopt: logact += " (+%s options)" % addopt
                elif chgopt: logact += " (!%s options)" % chgopt
        count += 1
        if logact:
            overrideslog.append("%s\n" % logact)
    overrideslog.sort()
    if os.path.exists(overrideslog_fn):
        os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
    overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
    import time
    overrideslog_fd.write(
        '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
        '# use, copy, modify, and distribute this software is granted under terms\n'
        '# provided in the LICENSE file distributed with this software.\n\n'
        % time.gmtime().tm_year)
    overrideslog_fd.writelines(overrideslog)
    overrideslog_fd.close()
    print("done (%s overridden sections: +%s/-%s/!%s)." % (
        count,
        added,
        removed,
        changed
    ) )
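    # Correlate the datasets: for every place, station, ZCTA and zone with
    # coordinates, look up the nearest station and/or zone via closest()
    # and store any match, printing a coarse percentage progress bar along
    # the way.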
    estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
    print(
        "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
            estimate
    )
    count = 0
    milestones = list( range(51) )
    message = "   "
    sys.stdout.write(message)
    sys.stdout.flush()
    for fips in places:
        # Start each place with empty matches so a missing centroid cannot
        # reuse the previous iteration's station or zone.
        station = zone = (None, None)
        centroid = places[fips]["centroid"]
        if centroid:
            station = closest(centroid, stations, "location", 0.1)
        if station[0]:
            places[fips]["station"] = station
            count += 1
            if not count%100:
                level = int(50*count/estimate)
                if level in milestones:
                    for remaining in milestones[:milestones.index(level)+1]:
                        if remaining%5:
                            message = "."
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        else:
                            message = "%s%%" % (remaining*2,)
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        milestones.remove(remaining)
        if centroid:
            zone = closest(centroid, zones, "centroid", 0.1)
        if zone[0]:
            places[fips]["zone"] = zone
            count += 1
            if not count%100:
                level = int(50*count/estimate)
                if level in milestones:
                    for remaining in milestones[:milestones.index(level)+1]:
                        if remaining%5:
                            message = "."
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        else:
                            message = "%s%%" % (remaining*2,)
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        milestones.remove(remaining)
    for station in stations:
        zone = (None, None)
        if "location" in stations[station]:
            location = stations[station]["location"]
            if location:
                zone = closest(location, zones, "centroid", 0.1)
            if zone[0]:
                stations[station]["zone"] = zone
                count += 1
                if not count%100:
                    level = int(50*count/estimate)
                    if level in milestones:
                        for remaining in milestones[:milestones.index(level)+1]:
                            if remaining%5:
                                message = "."
                                sys.stdout.write(message)
                                sys.stdout.flush()
                            else:
                                message = "%s%%" % (remaining*2,)
                                sys.stdout.write(message)
                                sys.stdout.flush()
                            milestones.remove(remaining)
    for zcta in zctas.keys():
        station = zone = (None, None)
        centroid = zctas[zcta]["centroid"]
        if centroid:
            station = closest(centroid, stations, "location", 0.1)
        if station[0]:
            zctas[zcta]["station"] = station
            count += 1
            if not count%100:
                level = int(50*count/estimate)
                if level in milestones:
                    for remaining in milestones[ : milestones.index(level)+1 ]:
                        if remaining%5:
                            message = "."
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        else:
                            message = "%s%%" % (remaining*2,)
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        milestones.remove(remaining)
        if centroid:
            zone = closest(centroid, zones, "centroid", 0.1)
        if zone[0]:
            zctas[zcta]["zone"] = zone
            count += 1
            if not count%100:
                level = int(50*count/estimate)
                if level in milestones:
                    for remaining in milestones[:milestones.index(level)+1]:
                        if remaining%5:
                            message = "."
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        else:
                            message = "%s%%" % (remaining*2,)
                            sys.stdout.write(message)
                            sys.stdout.flush()
                        milestones.remove(remaining)
    for zone in zones.keys():
        station = (None, None)
        if "centroid" in zones[zone]:
            centroid = zones[zone]["centroid"]
            if centroid:
                station = closest(centroid, stations, "location", 0.1)
            if station[0]:
                zones[zone]["station"] = station
                count += 1
                if not count%100:
                    level = int(50*count/estimate)
                    if level in milestones:
                        for remaining in milestones[:milestones.index(level)+1]:
                            if remaining%5:
                                message = "."
                                sys.stdout.write(message)
                                sys.stdout.flush()
                            else:
                                message = "%s%%" % (remaining*2,)
                                sys.stdout.write(message)
                                sys.stdout.flush()
                            milestones.remove(remaining)
    for remaining in milestones:
        if remaining%5:
            message = "."
            sys.stdout.write(message)
            sys.stdout.flush()
        else:
            message = "%s%%" % (remaining*2,)
            sys.stdout.write(message)
            sys.stdout.flush()
    print("\n   done (%s correlations)." % count)
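    # Write each dataset to its INI file, one section per identifier, with
    # floats formatted to seven decimal places and tuples element by
    # element; any existing file is kept as a "_old" backup.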
    message = "Writing %s..." % airports_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    if os.path.exists(airports_fn):
        os.rename(airports_fn, "%s_old"%airports_fn)
    airports_fd = codecs.open(airports_fn, "w", "utf8")
    airports_fd.write(header)
    for airport in sorted( airports.keys() ):
        airports_fd.write("\n\n[%s]" % airport)
        for key, value in sorted( airports[airport].items() ):
            if type(value) is float: value = "%.7f"%value
            elif type(value) is tuple:
                elements = []
                for element in value:
                    if type(element) is float: elements.append("%.7f"%element)
                    else: elements.append( repr(element) )
                value = "(%s)"%", ".join(elements)
            airports_fd.write( "\n%s = %s" % (key, value) )
        count += 1
    airports_fd.write("\n")
    airports_fd.close()
    print("done (%s sections)." % count)
    message = "Writing %s..." % places_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    if os.path.exists(places_fn):
        os.rename(places_fn, "%s_old"%places_fn)
    places_fd = codecs.open(places_fn, "w", "utf8")
    places_fd.write(header)
    for fips in sorted( places.keys() ):
        places_fd.write("\n\n[%s]" % fips)
        for key, value in sorted( places[fips].items() ):
            if type(value) is float: value = "%.7f"%value
            elif type(value) is tuple:
                elements = []
                for element in value:
                    if type(element) is float: elements.append("%.7f"%element)
                    else: elements.append( repr(element) )
                value = "(%s)"%", ".join(elements)
            places_fd.write( "\n%s = %s" % (key, value) )
        count += 1
    places_fd.write("\n")
    places_fd.close()
    print("done (%s sections)." % count)
    message = "Writing %s..." % stations_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    if os.path.exists(stations_fn):
        os.rename(stations_fn, "%s_old"%stations_fn)
    stations_fd = codecs.open(stations_fn, "w", "utf-8")
    stations_fd.write(header)
    for station in sorted( stations.keys() ):
        stations_fd.write("\n\n[%s]" % station)
        for key, value in sorted( stations[station].items() ):
            if type(value) is float: value = "%.7f"%value
            elif type(value) is tuple:
                elements = []
                for element in value:
                    if type(element) is float: elements.append("%.7f"%element)
                    else: elements.append( repr(element) )
                value = "(%s)"%", ".join(elements)
            if type(value) is bytes:
                value = value.decode("utf-8")
            stations_fd.write( "\n%s = %s" % (key, value) )
        count += 1
    stations_fd.write("\n")
    stations_fd.close()
    print("done (%s sections)." % count)
    message = "Writing %s..." % zctas_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    if os.path.exists(zctas_fn):
        os.rename(zctas_fn, "%s_old"%zctas_fn)
    zctas_fd = codecs.open(zctas_fn, "w", "utf8")
    zctas_fd.write(header)
    for zcta in sorted( zctas.keys() ):
        zctas_fd.write("\n\n[%s]" % zcta)
        for key, value in sorted( zctas[zcta].items() ):
            if type(value) is float: value = "%.7f"%value
            elif type(value) is tuple:
                elements = []
                for element in value:
                    if type(element) is float: elements.append("%.7f"%element)
                    else: elements.append( repr(element) )
                value = "(%s)"%", ".join(elements)
            zctas_fd.write( "\n%s = %s" % (key, value) )
        count += 1
    zctas_fd.write("\n")
    zctas_fd.close()
    print("done (%s sections)." % count)
    message = "Writing %s..." % zones_fn
    sys.stdout.write(message)
    sys.stdout.flush()
    count = 0
    if os.path.exists(zones_fn):
        os.rename(zones_fn, "%s_old"%zones_fn)
    zones_fd = codecs.open(zones_fn, "w", "utf8")
    zones_fd.write(header)
    for zone in sorted( zones.keys() ):
        zones_fd.write("\n\n[%s]" % zone)
        for key, value in sorted( zones[zone].items() ):
            if type(value) is float: value = "%.7f"%value
            elif type(value) is tuple:
                elements = []
                for element in value:
                    if type(element) is float: elements.append("%.7f"%element)
                    else: elements.append( repr(element) )
                value = "(%s)"%", ".join(elements)
            zones_fd.write( "\n%s = %s" % (key, value) )
        count += 1
    zones_fd.write("\n")
    zones_fd.close()
    print("done (%s sections)." % count)
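    # QA pass: re-read the generated files and log entries missing
    # centroids, descriptions, locations, METAR URLs, stations or
    # forecasts, plus zones whose centroids fall within one km of another.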
    message = "Starting QA check..."
    sys.stdout.write(message)
    sys.stdout.flush()
    airports = configparser.ConfigParser()
    airports.read(airports_fn)
    places = configparser.ConfigParser()
    places.read(places_fn)
    stations = configparser.ConfigParser()
    stations.read(stations_fn)
    zctas = configparser.ConfigParser()
    zctas.read(zctas_fn)
    zones = configparser.ConfigParser()
    zones.read(zones_fn)
    qalog = []
    places_nocentroid = 0
    places_nodescription = 0
    for place in sorted( places.sections() ):
        if not places.has_option(place, "centroid"):
            qalog.append("%s: no centroid\n" % place)
            places_nocentroid += 1
        if not places.has_option(place, "description"):
            qalog.append("%s: no description\n" % place)
            places_nodescription += 1
    stations_nodescription = 0
    stations_nolocation = 0
    stations_nometar = 0
    for station in sorted( stations.sections() ):
        if not stations.has_option(station, "description"):
            qalog.append("%s: no description\n" % station)
            stations_nodescription += 1
        if not stations.has_option(station, "location"):
            qalog.append("%s: no location\n" % station)
            stations_nolocation += 1
        if not stations.has_option(station, "metar"):
            qalog.append("%s: no metar\n" % station)
            stations_nometar += 1
    airports_badstation = 0
    airports_nostation = 0
    for airport in sorted( airports.sections() ):
        if not airports.has_option(airport, "station"):
            qalog.append("%s: no station\n" % airport)
            airports_nostation += 1
        else:
            station = airports.get(airport, "station")
            if station not in stations.sections():
                qalog.append( "%s: bad station %s\n" % (airport, station) )
                airports_badstation += 1
    zctas_nocentroid = 0
    for zcta in sorted( zctas.sections() ):
        if not zctas.has_option(zcta, "centroid"):
            qalog.append("%s: no centroid\n" % zcta)
            zctas_nocentroid += 1
    zones_nocentroid = 0
    zones_nodescription = 0
    zones_noforecast = 0
    zones_overlapping = 0
    zonetable = {}
    for zone in zones.sections():
        if zones.has_option(zone, "centroid"):
            zonetable[zone] = {
                "centroid": eval( zones.get(zone, "centroid") )
            }
    for zone in sorted( zones.sections() ):
        if zones.has_option(zone, "centroid"):
            zonetable_local = zonetable.copy()
            del( zonetable_local[zone] )
            centroid = eval( zones.get(zone, "centroid") )
            if centroid:
                nearest = closest(centroid, zonetable_local, "centroid", 0.1)
                if nearest[1]*radian_to_km < 1:
                    qalog.append( "%s: within one km of %s\n" % (
                        zone,
                        nearest[0]
                    ) )
                    zones_overlapping += 1
        else:
            qalog.append("%s: no centroid\n" % zone)
            zones_nocentroid += 1
        if not zones.has_option(zone, "description"):
            qalog.append("%s: no description\n" % zone)
            zones_nodescription += 1
        if not zones.has_option(zone, "zone_forecast"):
            qalog.append("%s: no forecast\n" % zone)
            zones_noforecast += 1
    if os.path.exists(qalog_fn):
        os.rename(qalog_fn, "%s_old"%qalog_fn)
    qalog_fd = codecs.open(qalog_fn, "w", "utf8")
    import time
    qalog_fd.write(
        '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
        '# use, copy, modify, and distribute this software is granted under terms\n'
        '# provided in the LICENSE file distributed with this software.\n\n'
        % time.gmtime().tm_year)
    qalog_fd.writelines(qalog)
    qalog_fd.close()
    if qalog:
        print("issues found (see %s for details):"%qalog_fn)
        if airports_badstation:
            print("   %s airports with invalid station"%airports_badstation)
        if airports_nostation:
            print("   %s airports with no station"%airports_nostation)
        if places_nocentroid:
            print("   %s places with no centroid"%places_nocentroid)
        if places_nodescription:
            print("   %s places with no description"%places_nodescription)
        if stations_nodescription:
            print("   %s stations with no description"%stations_nodescription)
        if stations_nolocation:
            print("   %s stations with no location"%stations_nolocation)
        if stations_nometar:
            print("   %s stations with no METAR"%stations_nometar)
        if zctas_nocentroid:
            print("   %s ZCTAs with no centroid"%zctas_nocentroid)
        if zones_nocentroid:
            print("   %s zones with no centroid"%zones_nocentroid)
        if zones_nodescription:
            print("   %s zones with no description"%zones_nodescription)
        if zones_noforecast:
            print("   %s zones with no forecast"%zones_noforecast)
        if zones_overlapping:
            print("   %s zones within one km of another"%zones_overlapping)
    else: print("no issues found.")
    print("Indexing complete!")