basic minetest schematic serialize/deserialize
commit 70170b8865
maptoolify_mts.py (new file, 50 lines)
@@ -0,0 +1,50 @@
import mts


def maptoolify_nodes(schematic):
    node_map = {
        b'default:stone': b'maptools:stone',
        b'default:stonebrick': b'maptools:stonebrick',
        b'default:tree': b'maptools:tree',
        b'default:jungletree': b'maptools:jungletree',
        b'default:cactus': b'maptools:cactus',
        b'default:papyrus': b'maptools:papyrus',
        b'default:dirt': b'maptools:dirt',
        b'default:wood': b'maptools:wood',
        b'default:junglewood': b'maptools:junglewood',
        b'default:glass': b'maptools:glass',
        b'default:leaves': b'maptools:leaves',
        b'default:sand': b'maptools:sand',
        b'default:gravel': b'maptools:gravel',
        b'default:clay': b'maptools:clay',
        b'default:desert_sand': b'maptools:desert_sand',
        b'default:sandstone': b'maptools:sandstone',
        b'default:sandstonebrick': b'maptools:sandstone_brick',
        b'default:desert_stone': b'maptools:desert_stone',
        b'default:desert_cobble': b'maptools:desert_cobble',
        b'default:desert_stonebrick': b'maptools:desert_stonebrick',
        b'default:dirt_with_grass': b'maptools:grass',
        # b'default:fullgrass': b'maptools:fullgrass',  # maptools only?
        b'default:cobble': b'maptools:cobble',
        b'default:mossycobble': b'maptools:mossycobble',
        b'default:brick': b'maptools:brick',
        b'default:coalblock': b'maptools:coalblock',
        b'default:steelblock': b'maptools:steelblock',
        b'default:goldblock': b'maptools:goldblock',
        b'default:copperblock': b'maptools:copperblock',
        b'default:bronzeblock': b'maptools:bronzeblock',
        b'default:diamondblock': b'maptools:diamondblock',
        b'farming:soil_wet': b'maptools:soil_wet',
        b'farming:desert_sand_soil_wet': b'maptools:desert_sand_soil_wet',
    }

    # Only the name-to-id table is rewritten; the node data itself keeps its
    # numeric ids, so this stays cheap even for large schematics.
    for i, name in enumerate(schematic.name_id_mapping):
        if name in node_map:
            schematic.name_id_mapping[i] = node_map[name]


if __name__ == "__main__":
    import sys

    schem = mts.read_schematic_from_file(sys.argv[1])
    maptoolify_nodes(schem)
    mts.write_schematic_to_file(schem, sys.argv[2])
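A quick way to see what the conversion actually changed is to compare the name-to-id table before and after calling maptoolify_nodes(). A minimal sketch, assuming both files sit in the same directory; the input path is only a placeholder:

import mts
from maptoolify_mts import maptoolify_nodes

schem = mts.read_schematic_from_file("example.mts")  # placeholder path
before = list(schem.name_id_mapping)                  # copy the table before mutation
maptoolify_nodes(schem)                               # rewrites the table in place
for old, new in zip(before, schem.name_id_mapping):
    marker = "->" if old != new else "(unchanged)"
    print(old, marker, new)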
mts.py (new file, 108 lines)
@@ -0,0 +1,108 @@
# https://github.com/minetest/minetest/blob/5.10.0/src/mapgen/mg_schematic.h

import struct
import zlib
import io


class Schematic():
    def __init__(self):
        pass

    def deserialize(self, data):
        assert data[0:4] == b'MTSM', "not a schematic file"
        assert data[4:6] == b'\x00\x04', "this script supports only version 4"

        offset = 6

        def read_unpack(format_s):
            nonlocal offset
            size = struct.calcsize(format_s)
            res = struct.unpack_from(format_s, data, offset)
            offset += size
            return res

        self.size_x, self.size_y, self.size_z = read_unpack(">3H")
        # print(f"{self.size_x = }")
        # print(f"{self.size_y = }")
        # print(f"{self.size_z = }")

        self.slice_prob = read_unpack(str(self.size_y) + "B")
        # print(f"{self.slice_prob = }")

        name_id_count, = read_unpack(">H")
        name_id_mapping = [None] * name_id_count
        for i in range(name_id_count):
            l, = read_unpack(">H")
            name_id_mapping[i] = data[offset:offset+l]
            offset += l
        self.name_id_mapping = name_id_mapping
        # print(f"{self.name_id_mapping = }")

        dobj = zlib.decompressobj()
        node_data = dobj.decompress(data[offset:])
        assert dobj.unused_data == b'', "something went wrong: extra data at the end of file"

        # restart in uncompressed data
        offset = 0
        data = node_data

        # stored indexed by z, y, x
        number_of_nodes = self.size_x * self.size_y * self.size_z
        number_of_nodes_s = str(number_of_nodes)
        self.param0 = read_unpack(">" + number_of_nodes_s + "H")
        self.param1 = read_unpack(number_of_nodes_s + "B")
        self.param2 = read_unpack(number_of_nodes_s + "B")
        assert offset == len(node_data), "uncompressed data has unexpected size"

    def serialize(self):
        data = io.BytesIO()
        data.write(b'MTSM\x00\x04')

        def pack_write(format_s, *args):
            data.write(struct.pack(format_s, *args))

        pack_write(">3H", self.size_x, self.size_y, self.size_z)
        pack_write(str(self.size_y) + "B", *self.slice_prob)
        pack_write(">H", len(self.name_id_mapping))
        for name in self.name_id_mapping:
            pack_write(">H", len(name))
            data.write(name)

        node_data = io.BytesIO()
        number_of_nodes_s = str(self.size_x * self.size_y * self.size_z)
        node_data.write(struct.pack(">" + number_of_nodes_s + "H", *self.param0))
        node_data.write(struct.pack(number_of_nodes_s + "B", *self.param1))
        node_data.write(struct.pack(number_of_nodes_s + "B", *self.param2))

        COMPRESSION_LEVEL = 6  # zlib's default in Python, written out in case the default ever changes
        data.write(zlib.compress(node_data.getvalue(), COMPRESSION_LEVEL))

        return data.getvalue()


def read_schematic_from_file(filename):
    with open(filename, 'rb') as f:
        s = Schematic()
        s.deserialize(f.read())
        return s


def write_schematic_to_file(schematic, filename):
    with open(filename, 'xb') as f:
        f.write(schematic.serialize())


# if __name__ == "__main__":
#     import sys
#     schem = read_schematic_from_file(sys.argv[1])
#
#     param0_mapped = list(map(lambda idx: schem.name_id_mapping[idx], schem.param0))
#     print(f"{param0_mapped = }")
#
#     write_schematic_to_file(schem, sys.argv[2])
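The header that Schematic.deserialize() walks through is small enough to inspect by hand. A rough sketch of peeking at the first fields of an existing .mts file (the path is only a placeholder), mirroring the layout the class assumes:

import struct

with open("example.mts", "rb") as f:  # placeholder path
    raw = f.read()

print(raw[0:4])                                 # magic, b'MTSM'
print(struct.unpack_from(">H", raw, 4)[0])      # format version, 4 here
size_x, size_y, size_z = struct.unpack_from(">3H", raw, 6)
print(size_x, size_y, size_z)                   # schematic dimensions
# next come size_y per-slice probability bytes and the name-id table,
# then the zlib-compressed param0/param1/param2 arrays for every node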