#!/usr/bin/env python3
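"""Merge individual OMT mapgen entries into blocks of maps.

Reads mapgen and overmap special JSON, groups the adjacent maps of each
special into STRIDE_X by STRIDE_Y blocks, and emits merged mapgen
entries with combined rows, keyed terms, and place_ entries.
"""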

import json
import argparse
import copy
import math
from operator import itemgetter


STRIDE_X = 3
STRIDE_Y = 2
MIN_X = 10000
MIN_Y = 10000
OM_SPEC_TYPES = ["overmap_special", "city_building"]
KEYED_TERMS = [
    "terrain", "furniture", "fields", "npcs", "signs", "vendingmachines",
    "toilets", "gaspumps", "items", "monsters", "vehicles", "item", "traps",
    "monster", "rubble", "liquids", "sealed_item", "graffiti", "mapping"
]
PLACE_TERMS = ["set", "place_groups"]
MAP_ROTATE = [
    {"dir": "_south", "x": 1, "y": 1},
    {"dir": "_north", "x": 0, "y": 0}
]


def x_y_bucket(x, y):
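    """Key of the STRIDE_X by STRIDE_Y block that contains map (x, y).

    For example, with MIN_X = 0 and STRIDE_X = 3, x values 0-2 fall in
    bucket column 0 and x values 3-5 fall in bucket column 1.
    """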
    return "{}__{}".format(math.floor((x - MIN_X) / STRIDE_X),
                           math.floor((y - MIN_Y) / STRIDE_Y))


def x_y_sub(x, y, is_north):
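    """Position key of map (x, y) within its block.

    Coordinates of non-north (rotated) maps are shifted back by 1
    before taking the modulus.
    """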
    if is_north:
        return "{}__{}".format((x - MIN_X) % STRIDE_X, (y - MIN_Y) % STRIDE_Y)
    else:
        return "{}__{}".format((x - MIN_X - 1) % STRIDE_X,
                               (y - MIN_Y - 1) % STRIDE_Y)


def x_y_simple(x, y):
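    """Plain "x__y" key for a position within a merge set."""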
    return "{}__{}".format(x, y)


def get_data(argsDict, resource_name):
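    """Load and concatenate the JSON arrays from every file listed
    under resource_name on the command line."""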
    resource = []
    resource_sources = argsDict.get(resource_name, [])
    if not isinstance(resource_sources, list):
        resource_sources = [resource_sources]
    for resource_filename in resource_sources:
        if resource_filename.endswith(".json"):
            try:
                with open(resource_filename) as resource_file:
                    resource += json.load(resource_file)
            except FileNotFoundError:
                exit("Failed: could not find {}".format(resource_filename))
        else:
            print("Invalid filename {}".format(resource_filename))
    return resource


def adjacent_to_set(x, y, coord_set):
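    """Return True if (x, y) is orthogonally adjacent to any coordinate
    pair in coord_set."""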
    for coords in coord_set:
        if y == coords["y"] and abs(x - coords["x"]) == 1:
            return True
        if x == coords["x"] and abs(y - coords["y"]) == 1:
            return True
    return False


def validate_keyed(key_term, old_obj, entry):
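    """Merge the old map's keyed term (terrain, furniture, ...) into
    the new entry's keyset.  Return the merged keyset, or False when
    the two maps give the same key different values."""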
    new_keyset = entry["object"].get(key_term, {})
    old_keyset = old_obj.get(key_term, {})
    if new_keyset:
        for old_key, old_val in old_keyset.items():
            new_keyset.setdefault(old_key, old_val)
            if new_keyset[old_key] != old_val:
                return False
    else:
        new_keyset = old_keyset
    return new_keyset


# make sure that all keyed entries have the same key values and the maps have
# the same weight and fill_ter.  Don't try to resolve conflicts.
def validate_old_map(old_map, entry):
    old_obj = old_map.get("object", {})

    if entry["weight"] and old_map.get("weight"):
        if entry["weight"] != old_map.get("weight"):
            return False
    if entry["object"].get("fill_ter") and old_obj.get("fill_ter") and \
            entry["object"]["fill_ter"] != old_obj.get("fill_ter"):
        return False

    new_palettes = entry["object"].get("palettes", [])
    old_palettes = old_obj.get("palettes", [])
    if new_palettes:
        for palette in old_palettes:
            if palette not in new_palettes:
                return False
    else:
        new_palettes = old_palettes

    keysets = {}
    for key_term in KEYED_TERMS:
        new_keyset = validate_keyed(key_term, old_obj, entry)
        if new_keyset:
            keysets[key_term] = new_keyset
        elif new_keyset is False:
            return False

    if not entry["weight"]:
        entry["weight"] = old_map.get("weight", 0)
    if not entry["object"].get("fill_ter"):
        entry["object"]["fill_ter"] = old_obj.get("fill_ter", "")
    for key_term, new_keyset in keysets.items():
        entry["object"][key_term] = new_keyset
    if new_palettes:
        entry["object"]["palettes"] = new_palettes

    return True


# adjust the X, Y coordinates of a place_ entry to match the new map
def adjust_place(term, old_obj, offset_x, offset_y):
    def adjust_coord(x_or_y, new_entry, old_entry, offset):
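        # coordinate values may be scalars or [min, max] lists; build a
        # new list so the source map's coordinates are not mutated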
        val = old_entry.get(x_or_y)
        if val is None:
            return
        if isinstance(val, list):
            val = [val[0] + offset, val[1] + offset]
        else:
            val += offset
        new_entry[x_or_y] = val

    results = []
    for old_entry in old_obj.get(term, []):
        new_entry = copy.deepcopy(old_entry)
        if offset_x:
            adjust_coord("x", new_entry, old_entry, offset_x)
            adjust_coord("x2", new_entry, old_entry, offset_x)
        if offset_y:
            adjust_coord("y", new_entry, old_entry, offset_y)
            adjust_coord("y2", new_entry, old_entry, offset_y)
        results.append(new_entry)
    return results


args = argparse.ArgumentParser(
    description="Merge individual OMT maps into blocks of maps.")
args.add_argument("mapgen_sources", action="store", nargs="+",
                  help="specify JSON files to convert to blocks.")
args.add_argument("specials_sources", action="store", nargs="+",
                  help="specify JSON files with overmap special data.")
args.add_argument(
    "--x", dest="stride_x", action="store",
    help="number of horizontal maps in each block.  "
         "Defaults to {}.".format(STRIDE_X))
args.add_argument(
    "--y", dest="stride_y", action="store",
    help="number of vertical maps in each block.  "
         "Defaults to {}.".format(STRIDE_Y))
args.add_argument("--output", dest="output_name", action="store",
                  help="name of the output file.  Defaults to printing "
                       "to standard output.")
argsDict = vars(args.parse_args())

mapgen = get_data(argsDict, "mapgen_sources")
specials = get_data(argsDict, "specials_sources")
string_x = argsDict.get("stride_x")
if string_x and int(string_x):
    STRIDE_X = int(string_x)
string_y = argsDict.get("stride_y")
if string_y and int(string_y):
    STRIDE_Y = int(string_y)

output_name = argsDict.get("output_name", "")
if output_name and not output_name.endswith(".json"):
    output_name += ".json"

# very first pass: sort the overmaps and track the minimum X and Y values
# across all specials
for special in specials:
    if special.get("type") in OM_SPEC_TYPES:
        overmaps = special.get("overmaps")
        if not overmaps:
            continue
        overmaps.sort(key=lambda om_data: om_data.get("point", [1000, 0])[0])
        MIN_X = min(MIN_X, overmaps[0].get("point", [1000, 0])[0])
        overmaps.sort(key=lambda om_data: om_data.get("point", [0, 1000])[1])
        MIN_Y = min(MIN_Y, overmaps[0].get("point", [0, 1000])[1])

# create the merge sets of maps
merge_sets = {}
for special in specials:
    if special.get("type") in OM_SPEC_TYPES:
        overmaps = special.get("overmaps", [])
        for om_data in overmaps:
            om_map = om_data.get("overmap")
            for map_dir in MAP_ROTATE:
                if om_map.endswith(map_dir["dir"]):
                    om_map = om_map.split(map_dir["dir"])[0]
                    break
            om_point = om_data.get("point", [])
            if len(om_point) == 3:
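                # MAP_ROTATE ends with "_north", so maps without a
                # rotation suffix fall out of the loop above as north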
                is_north = map_dir["dir"] == "_north"
                x = om_point[0]
                y = om_point[1]
                z = om_point[2]
                merge_sets.setdefault(z, {})
                merge_sets[z].setdefault(x_y_bucket(x, y), {})
                merge_sets[z][x_y_bucket(x, y)][x_y_sub(x, y, is_north)] = \
                    om_map

# convert the mapgen list into a dictionary for easier access
map_dict = {}
new_mapgen = []
for om_map in mapgen:
    if om_map.get("type") != "mapgen":
        new_mapgen.append(om_map)
        continue
    om_id = om_map["om_terrain"]
    if isinstance(om_id, list):
        if len(om_id) == 1:
            om_id = om_id[0]
        else:
            continue
    map_dict[om_id] = om_map

# dynamically expand the list of "place_" terms
for term in KEYED_TERMS:
    PLACE_TERMS.append("place_" + term)

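# skeleton mapgen entry that every merged chunk starts from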
basic_entry = {
    "method": "json",
    "object": {
        "fill_ter": "",
        "rows": [],
        "palettes": [],
    },
    "om_terrain": [],
    "type": "mapgen",
    "weight": 0
}

# debug: make sure the merge sets look correct
#print("mergesets: {}".format(json.dumps(merge_sets, indent=2)))

# finally start merging maps
for z, zlevel in merge_sets.items():
    for x_y, mapset in zlevel.items():
        # first, split the merge set into chunks with common KEYED_TERMS
        # using a crude flood fill
        chunks = [{"maps": [], "entry": copy.deepcopy(basic_entry)}]
        for y in range(0, STRIDE_Y):
            for x in range(0, STRIDE_X):
                om_id = mapset.get(x_y_simple(x, y), "")
                if not om_id:
                    continue
                old_map = map_dict.get(om_id, {})
                if not old_map:
                    continue
                validated = False
                for chunk_data in chunks:
                    # skip chunks this map is not adjacent to; gaps can
                    # appear when other maps in the set were not valid
                    if chunk_data["maps"] and not adjacent_to_set(
                            x, y, chunk_data["maps"]):
                        continue
                    # check that this map's keyed terms match the other keyed
                    # terms in this chunk
                    if validate_old_map(old_map, chunk_data["entry"]):
                        chunk_data["maps"].append({"x": x, "y": y})
                        validated = True
                        break
                if not validated:
                    new_entry = copy.deepcopy(basic_entry)
                    chunks.append({"maps": [{"x": x, "y": y}],
                                   "entry": new_entry})
        # then split up any irregular shapes that made it past the screening
        # above; T and L shapes are possible, for instance, since every map
        # only has to be adjacent to one other map in the chunk
        final_chunks = []
        for chunk_data in chunks:
            maps = chunk_data["maps"]
            if len(maps) < 3:
                final_chunks.append(chunk_data)
                continue
            maps.sort(key=itemgetter("x"))
            max_x = maps[-1]["x"]
            min_x = maps[0]["x"]
            maps.sort(key=itemgetter("y"))
            max_y = maps[-1]["y"]
            min_y = maps[0]["y"]
            # if this is a line, square, or rectangle, it's contiguous
            if len(maps) == ((max_x - min_x + 1) * (max_y - min_y + 1)):
                final_chunks.append(chunk_data)
                continue
            # if not, just break it into individual maps
            for coords in maps:
                final_chunks.append({
                    "maps": [{"x": coords["x"], "y": coords["y"]}],
                    "entry": copy.deepcopy(chunk_data["entry"])
                })

        if not final_chunks:
            continue

        # debug: do the final chunks look sane?
        #print("chunks: {}".format(json.dumps(chunks, indent=2)))
        # go through the final chunks and merge them
        for chunk_data in final_chunks:
            new_rows = []
            entry = chunk_data["entry"]
            maps = chunk_data["maps"]
            if not maps:
                continue
            first_x = maps[0]["x"]
            first_y = maps[0]["y"]
            for coords in maps:
                x = coords["x"]
                y = coords["y"]
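                # each OMT is 24x24 characters, so offset by 24 rows or
                # columns per map position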
                row_offset = 24 * (y - first_y)
                col_offset = 24 * (x - first_x)
                om_id = mapset.get(x_y_simple(x, y), "")
                old_map = map_dict.get(om_id, {})
                old_obj = old_map.get("object", {})
                old_rows = old_obj.get("rows", [])
                # merge the rows: the leftmost map in each row of maps
                # appends new rows, and the rest concatenate onto them
                for i in range(0, 24):
                    if not col_offset:
                        new_rows.append(old_rows[i])
                    else:
                        new_rows[i + row_offset] = \
                            "".join([new_rows[i + row_offset], old_rows[i]])
                if len(maps) == 1:
                    entry["om_terrain"] = om_id
                else:
                    if len(entry["om_terrain"]) < (y - first_y + 1):
                        entry["om_terrain"].append([om_id])
                    else:
                        entry["om_terrain"][y - first_y].append(om_id)
                # adjust the offsets for place_ entries before potentially
                # converting set entries
                for place_term in PLACE_TERMS:
                    new_terms = adjust_place(place_term, old_obj, col_offset,
                                             row_offset)
                    if new_terms:
                        entry["object"].setdefault(place_term, [])
                        for term_entry in new_terms:
                            entry["object"][place_term].append(term_entry)
            # finally done with the chunk, so add it to the list
            entry["object"]["rows"] = new_rows
            new_mapgen.append(entry)
            # debug: make sure the final chunk is correct
            #print("{}".format(json.dumps(entry, indent=2)))

if output_name:
    with open(output_name, 'w') as output_file:
        output_file.write(json.dumps(new_mapgen))
    exit()

print(json.dumps(new_mapgen, indent=2))