diff --git a/Dat2ObjTex.py b/Dat2ObjTex_old.py similarity index 100% rename from Dat2ObjTex.py rename to Dat2ObjTex_old.py diff --git a/Dat2Obj.py b/Dat2Obj_old.py similarity index 100% rename from Dat2Obj.py rename to Dat2Obj_old.py diff --git a/LINUX_INSTALL b/LINUX_INSTALL new file mode 100644 index 0000000..df181a7 --- /dev/null +++ b/LINUX_INSTALL @@ -0,0 +1,26 @@ +INSTALLATION NOTES FOR LINUX SYSTEMS +==================================== + +Just unpack the stuff somewhere and make sure the next files are executable: + +* Dat2Mesh.py +* Dat2Obj_old.py +* dat2obj.py +* Dat2ObjTex_old.py +* DatScale.py +* Mesh2Dat.py +* Mesh2DatTex.py +* Mesh2Obj.py +* Obj2DatTexNorm.py +* Obj2DatTex.py + +Then, calling one of these file with the './' prefix will make it run using your +Python 2 interpreter! + +Have a look to the 'README.md' file for usage. +`dat2obj.md` contains specific information for `dat2obj.py` program. + +If you are using Python 3 instead of Python 2, 'dat2obj.py' program will work if +you convert it with '2to3' Python script. +No waranty for the other ones! + diff --git a/README.md b/README.md index 7d34a83..b44d0fa 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,11 @@ A set of Python scripts to convert models to and from Oolite’s DAT format. +The *Dat2Obj.py* and *Dat2ObjTex.py* has been both replaced by *dat2obj.py* one and respectively renamed *Dat2Obj_old.py* and *Dat2ObjTex_old.py*. + +*dat2obj.py* can handle `.dat` files containing mono-textured objects or multi-textured object, including normals. +See `dat2obj.md` for more information. + The tools are: *Obj2DatTexNorm.py*: convert a mesh in OBJ format to DAT, preserving texture coordinates and vertex normals. This is now the recommended conversion tool. The meshes it produces require Oolite test release 1.74 or later. 
diff --git a/dat2obj.md b/dat2obj.md new file mode 100644 index 0000000..4836a63 --- /dev/null +++ b/dat2obj.md @@ -0,0 +1,105 @@ +# Documentation for `dat2obj.py` + +`dat2obj.py` is the heir of `Dat2Obj.py` and `Dat2ObjTex.py` and their replacer. It includes normals and multi-texture support. + +To convert correctly multi-textured objects, you will have to use `.oti` files if a *NAMES* section exists in their `.dat` files. +See __What are `.oti` files?__, __How to write a `.oti` file?__ and __Feeling `.oti` lazy?__ sections. + + +## How to use it? + +The usage of this program is very like than the former ones. +Just call it with at least one `.dat` file, and it will create `.obj` and `.mtl` files. +However, a `--help` and a `--debug` command line arguments have beed added. + +On a Linux system (provided the file is executable), if you enter in the shell: + +``` +$ ./dat2obj.py -h +``` + +You'll get: + +``` +============================================================================== +dat2obj.py 1.0.0 +(C) Giles Williams 2005 and Kaks 2008 / LaChal 2018. + +Converts Oolite .dat files into Wavefromt .obj and .mtl ones. + +dat2obj.py <.dat_file_name_1> [[<.dat_file_name_2 [...]] [--debug] + +-h --help Print this screen and exits regardless other options. + --debug Writes output files. + +When '--debug' is given, several dump files are witten and contain the program +internal data: +.fac Faces. +.nor Normals. +.sec Sections data as found in .dat files. +.tex Textures data. +.txm Textures aliases/real names map. + +``` + +For named textures models, you'll need to create a `.oti` file before running `dat2obj.py` to have a correct `.mtl` file. +See __What are `.oti` file?__ below. + + +## What `.dat` files is it able to convert? + +This program shall be able to convert any Oolite `.dat` file. + +However, some 'ill-formated' files (like `alloy.dat`, `buoy.dat` and `scarred_alloy.dat`) can't be converted. + + +## What are 'named' and 'indexed' textures? 
+ +'Named' textures are the ones which image file name is written in the *FACES* section in `.dat` files. +'Indexed' textures are the ones which uses an alias in the *NAMES* section and are referenced by the alias number in the *FACES* section. + +`dat2obj.py` is able to handle both, however, when 'indexed' textures ar used in `.dat` files, you'll need to have a corresponding `.oti` file. + + +## What are `.oti` files? + +These files are used to store the real texture file names in the same order than in the *NAMES* section if the `.dat` file. +They are simple raw text files containing one texture file name a line. + +Without the corresponding `.oti` file, `dat2obj.py` will not be able to write the image file name for the textures in the `.mtl` file. + + +## How to write a `.oti` file? + +Let's use an example: you want to convert `oolite_anaconda.dat`. + +* First, create an empty text file named `oolite_anaconda.oti` alongside the `.dat` one. Be sure that the name ends with `.oti`, not something else! +* Then open `oolite_anaconda.dat` in a text editor, and search for *NAMES*. + Here, you have three names: *Engine*, *Gun* and *Hull*, in this order. +* Then, open `shipdata.plist` (or the `.plist` file in which the game object using this model is defined). + Look for the line `oolite_template_anaconda`. Insude this section, you'll find a `materials` object defined. + This section has objects defined with the same names than in the *NAMES* section in the `.dat` file. +* In the `.oti` file, write the file name defined by *diffuse_map* in the section named like the names we have in the `.dat` file. + Write these file names in the same order the names are in the `.dat` file! + +The final `.oti` file will be: + +``` +oolite_anaconda_subents.png +oolite_anaconda_subents.png +oolite_anaconda_diffuse.png +``` + +## Feeling `.oti` lazy? + +If you don't want to create yourself `.oti` files, you can try the `build_otis.py` Python program in the `test` directory. 
+ +This program requires two command line arguments: +* The path to the `.plist` file to get the texture file names from. +* The path to the directory where the `.dat` files you want to convert are. + +All the `.dat` files found will be scanned, and corresponding `.oti` files created. +If a `.dat` file do not have a *NAMES* section, no corresponding `.oti` file is generated! + +Note that the `pbPlist` Python module is required for `build_otis.py` to work. + diff --git a/dat2obj.py b/dat2obj.py new file mode 100755 index 0000000..f2d6119 --- /dev/null +++ b/dat2obj.py @@ -0,0 +1,661 @@ +#!/usr/bin/python +# +# -*- coding: utf-8 -*- +# +# dat2obj.py v1.0.2 +# +""" +dat2obj.py by D.C.-G. (LaChal) 2018. +Originaly Dat2ObjTex.py by Giles Williams and Kaks. +This version supports multiple textures. + +----------------------------- NOTES ON TEXTURES ----------------------------- +Textures in .dat files can be referenced by their name or by index. +When an index is used, the texture pseudo-name can be found in the NAMES +section in the .dat file. +The first name in this section is index 0, the second one index 1, etc. +The names are the same as the entries names in the 'materials' object in the +.plist file which defines the Oolite object. + +When real texture names are used, they can simply be added to the .mtl file. + +When using indexed texture names, we need to know which texture name match +which index/pseudo-name, and which index/pseudo-name match which real name. +To do that, a simple text file named like the .dat, but with a .oti (Oolite +Texture Index) extension can be created alongside the .dat file to convert. +The format of this .oti file is very simple: one texture file name a line. +The first name will be the index 0 found in the .dat file, the second one +index 1, and so on. +Creating these .oti files externally will be removed in a future version which +will gnenerate the needed data directly.. 
+""" +__authors__ = "(C) Giles Williams 2005 and Kaks 2008 / LaChal 2018." +__version__ = "1.0.2" + +import os +import sys +import re +import pprint +from collections import OrderedDict + + +__prog_name__ = os.path.basename(__file__) + +__help__ = """%s + +Converts Oolite .dat files into Wavefromt .obj and .mtl ones. + +%s <.dat_file_name_1> [[<.dat_file_name_2 [...]] [--debug] + +-h --help Print this screen and exits regardless other options. + --debug Writes output files. + +When '--debug' is given, several dump files are witten and contain the program +internal data: +.fac Faces. +.nor Normals. +.sec Sections data as found in .dat files. +.tex Textures data. +.txm Textures aliases/real names map. + +""" % (__authors__, __prog_name__) + + +#-------------------------- OUTPUT FILES TEMPLATES --------------------------- +COMMON_HEADER = """# Exported with Dat2ObjTex.py (C) Giles Williams 2005 - Kaks 2008 +# Revamped by D.C.-G. 2018.""" + +OBJ_TEMPLATE = """{header} +mtllib {mtl_lib_file} +o {obj_name} +# {n_verts} vertices, {n_faces} faces, {n_norms} normals +{vertex_lines} +{tex_lines} +{norms_lines} +{faces_lines} + +""" + +FACES_TEMPLATE = """g {obj_name}_{tex_name} +usemtl {tex_name} +{faces_lines} +""" + +MATERIAL_TEMPLATE = """{header} +# Material number {mat_num} +{materials} +""" + +MATERIAL_ENTRY_TEMPLATE = """newmtl {mtl_name} +Ns 100.000 +d 1.00000 +illum 2 +Kd 1.00000 1.00000 1.00000 +Ka 1.00000 1.00000 1.00000 +Ks 1.00000 1.00000 1.00000 +Ke 0.00000e+0 0.00000e+0 0.00000e+0 +map_Kd {tex_file} + +""" + + +#----------------------------- HELPER FUNCTIONS ------------------------------ +def __exit(msg, code=0, std=sys.stdout): + """Quit the program displaying 'msg' (or nothing) with exit code 'code'. + Not intended to be used directly! + :msg: string: Message to display. If an empty string is given, nothing is + written to the output. + :code: int: The exit code to send back to the system/program. + Defaults to 0. 
+ :std: object: The 'std' file descriptor to wirte the message to. + Defaults to sys.stdout. + Valid values are sys.stdout and sys.stderr, or raise an IOError. + Don't return anything... + """ + if std not in (sys.stderr, sys.stdout): + raise IOError("Bad output file descriptor '%s'." % std) + if not msg.endswith(os.linesep): + msg += os.linesep + std.write(msg) + sys.exit(code) + + +def _exit(msg): + """Normal program termination. Calls '__exit' with 'msg' argument. + :msg: string: Message to display. + Exit code is always 0. + Don't return anything... + """ + __exit(msg) + + +def _error(msg, code=1): + """Display '! ! ! ERROR:' followed by 'msg' then exit program with 'code'. + :msg: string: Message to display. + :code: int: Exit code to use when program exits. + Defaults to 1. + Don't return anything... + """ + __exit("! ! ! ERROR: %s" % msg, code, sys.stderr) + + +def check_cli(): + """Reads sys.argv and process arguments. + Returns a tuple: (bool:debug_mode, list:input_file_names). + """ + if "--help" in sys.argv or "-h" in sys.argv: + _exit(__help__) + debug = False + if "--debug" in sys.argv: + debug = True + sys.argv.remove("--debug") + input_file_names = sys.argv[1:] + return debug, input_file_names + + +def split_line(line): + """Splits a line on commas or blank characters. + :line: string: The line to be split. + Returns a list of strings.""" + if ',' in line: + return [a.strip() for a in line.split(',')] + return line.split() + + +def build_file_path(dir_name, file_name, ext): + """Rebuilds a file path. + :dir_name: string: The directory where the file lies. + :file_name: string: The name of the file. + :ext: string: The file extension. + Return a string. + """ + return os.path.join(dir_name, os.path.extsep.join((file_name, ext))) + + +def write_dump_file(dir_name, file_name, ext, datas): + """Writes a dump file for debugging internal data. + :dir_name, :file_name and :ext: See 'build_file_path' docstring. 
+ :datas: dictionary: Keys are data names and values datas to be dumped. + """ + f_name = build_file_path(dir_name, file_name, ext) + with open(f_name, "w") as fd_out: + fd_out.write("Dump file for %s" % file_name) + for name, data in datas.items(): + fd_out.write("\n\n%s\n%s\n\n" % ("-" *80, name)) + if isinstance(data, dict): + for key, value in data.items(): + if isinstance(value, OrderedDict): + value = dict(value) + fd_out.write('"%s": %s\n' % (key, pprint.pformat(value, indent=4))) + else: + fd_out.write(pprint.pformat(data, indent=4)) + + +def _check_nentries(sections, num_def, dat_def): + """Used by 'check_...' functions. + :sections: dictionary: The object send by 'get_sections' function. + :num_def: string: The object in :section to get the number of entries. + :dat_def: string: The name of the object in :sections to check the entries + number in. + """ + print " * Checking %s" % num_def + nentries = int(sections[num_def]["arguments"].split()[0]) + if nentries == len(sections[dat_def]["data"]): + return True + return False + + +def _parse_vn(lines, out_format): + """Used to parse data which have same input shape and return same type of + objects. + :lines: list of string: Data to be parsed. + :format: string: Format to be used to build the result. + Returns a tuple: + (int:number, list:lines_out) + Used by parse_VERTEX and parse_NORMALS. + """ + number = 0 + lines_out = [] + loa = lines_out.append + for line in lines: + coordinates = split_line(line) + if len(coordinates) == 3: + number += 1 + loa(out_format % (-float(coordinates[0]), + float(coordinates[1]), + float(coordinates[2]))) + return number, lines_out + + +def _get_tex_name(tex_map, idx, suffix="_auv"): + """Returns a texture name built according to :tex_map data. + :tex_map: dictionary: The texture map to search in. + :idx: string: The index to search for in :tex_map. + :suffix: string: The suffix to be added to the texture name. + Defaults to '_auv'. 
+ If :idx is not found in :tex_map, a default name is built like this: + 'tex'. + Returns the texture name as string. + """ + return "%s%s" % (tex_map.get(idx, {}).get("alias", "tex%s" % idx), suffix) + + +#------------------------------ MAGIC FUNCTIONS ------------------------------ +# These functions are called 'magically' and only if they exists. +def check_nverts(sections): + """Verify if the number of lines in VERTEX section are exactly as + defined in NVERTS. + :sections: dictionary: The object send by 'get_sections' function. + """ + return _check_nentries(sections, "NVERTS", "VERTEX") + + +def check_nfaces(sections): + """Verify if the number of lines in FACES section are exactly as + defined in NFACES. + :sections: dictionary: The object send by 'get_sections' function. + """ + return _check_nentries(sections, "NFACES", "FACES") + + +def check_names(sections): + """Verify the texture names length is as declared in NAMES. + :sections: dictionary: The object send by 'get_sections' function. + """ + return _check_nentries(sections, "NAMES", "NAMES") + + +#-------------------------- DATA PARSING FUNCTIONS --------------------------- +def parse_textures(lines): + """Parses the TEXTUES data and build new data to be used later and written + in .obj file. + :lines: list of strings: The TEXTURES lines as found in the .dat file. + Returns a tuple: + (dict:textures_references, list:.obj_file_textures)""" + print " * Parsing textures" + + def tex_index(tex, vts): + """Returns the index of 'tex' in 'vts' or -1 if not found. + :tex: string: Preformated string to be found. + :vts: list of preformatted strings: Where to find 'tex'. 
+ Returns a signed int.""" + if tex in vts: + return vts.index(tex) + return -1 + + tex_refs = {} + named = OrderedDict() + numbered = OrderedDict() + vts = [] + vtsa = vts.append + tex_lines_out = [] + tloa = tex_lines_out.append + n_faces = 0 + for line in lines: + # It may happen that some lines uses more than one tab to separate + # values, so let's remove empty elements in the split result. + tokens = filter(None, line.split("\t")) + tex_name = tokens[0] + if tex_name not in named.keys(): + named[tex_name] = {} + if n_faces not in numbered.keys(): + numbered[n_faces] = {} + tex_for_face = named[tex_name] + points = tokens[2:] + tff = [] + + for point in points: + v_data = point.split() + vt_data = '%.6f %.6f' % (float(v_data[0]), 1 - float(v_data[1])) + if vt_data not in vts: + vtsa(vt_data) + tloa("vt %s" % vt_data) + tff[len(tff):] = [tex_index(vt_data, vts)] + tex_for_face[n_faces] = tff + numbered[n_faces] = tff, tex_name + n_faces += 1 + tex_refs = {"named": named, "numbered": numbered} + return tex_refs, tex_lines_out + + +def parse_vertex(lines): + """Parses the VERTEX data and return new data to be written in .obj file. + :lines: list of strings: The VERTEX lines as found in the .dat file. + Returns a tuple: + (int:number_of_vertex, list:.obj_file_vertex)""" + print " * Parsing vertex" + return _parse_vn(lines, "v %.6f %.6f %.6f") + + +def parse_normals(lines): + """Parses the NORMALS data and return new data to be written in .obj file. + :lines: list of strings: The NORMALS lines as found in the .dat file. + Returns a tuple: + (int:number_of_normals, list:.obj_file_normals)""" + print " * Parsing normals" + return _parse_vn(lines, "vn %.6f %.6f %.6f") + + +def parse_faces(lines, tex_for_face, n_normals): + """Parses the FACES data and new data to be written in .obj file. + :lines: list of strings: The FACES lines as found in the .dat file. + :tex_for_face: dict: Contains texture information as parsed by + 'parse_textures'. 
+ :n_normals: int: The number of lines in NORMALS .dat file entry. + Returns a tuple: + (int:number_of_faces, dict:faces_groups) + dict:faces_groups contains lists of lines to be written in the .obj file, + according to the texture they belong to.""" + print " * Parsing faces" + + def build_face_no_norm(p_d, f_i, *args): + """Builds a 'face' without mormal reference. + :p_d: int: Point data. + :f_i: int: Face 'info'. + Returns a string: '<:p_d + 1>/<:f_i + 1>/ '. + """ + # *args are unused, don't keep them. + del args + return "%s/%s/ " % (p_d + 1, f_i + 1) + + def build_face_norm(p_d, f_i, n_i): + """Builds a 'face' with mormal reference. + :p_d: int: Point data. + :f_i: int: Face 'info'. + :n: int: The normal reference index. + returs a string: '<:p_d + 1>/<:f_i + 1>/ '. + """ + return "%s/%s/%s " % (p_d + 1, f_i + 1, n_i + 1) + + build_face = build_face_no_norm + if n_normals: + build_face = build_face_norm + + n_faces = 0 + faces_groups = OrderedDict() + + tex_numbered = tex_for_face["numbered"] + tex_named = tex_for_face["named"] + for line in lines: + tokens = split_line(line) + if len(tokens) > 9: + # color_data and normal_data are not (yet) used... + # normal_data will be used to write 'vn' entries in the .obj file. + ## color_data = tokens[0:3] + ## normal_data = tokens[3:6] + n_points = int(tokens[6]) + point_data = tokens[7:] + faces = "" + face_info = tex_numbered[n_faces] + tex_name = face_info[-1] + if tex_name not in faces_groups.keys(): + faces_groups[tex_name] = [] + + # Verify the face data in also in the 'named' texture data. + verif = tex_named.get(tex_name) + err = "" + if not verif: + err = "Could not find name '%s' in references." % tex_name + verif = verif.get(n_faces) + if not verif: + err = "Could not find index %s in references." 
% n_faces + if err: + raise KeyError(err) + + floa = faces_groups[tex_name].append + f_i = face_info[0] + for i in xrange(n_points): + faces += build_face(int(point_data[i]), f_i[i], n_faces) + floa("f %s" %faces) + n_faces += 1 + + return n_faces, faces_groups + + +def parse_names(lines, oti_file_name): + """Parses the NAMES data. + :lines: array of strings: The data to be parsed. + :oti_file_name: string: The .oti file to read texture file names from. + Returns a dict like: + {"line_index": "alias": "", "name": ""} + """ + print " * Parsing names" + # Read the real texture file names form the file. + real_names = [] + if os.path.isfile(oti_file_name): + with open(oti_file_name, "rU") as oti_fd: + real_names = oti_fd.read().splitlines() + + names = {} + for i, line in enumerate(lines): + name = "." + if i < len(real_names): + name = real_names[i] + names["%s" % i] = {"alias": line, "name": name} + return names + + +#------------------------------ CORE FUNCTIONS ------------------------------- +def get_sections(data): + """Parses 'data' to get the sections defined in. + :data: string: Raw .dat file content. + Returns an OrderedDict like: + {"SECTION_NAME": {"arguments": "what follows SECTION_NAME", + "data": [list of lines in section data]}} + """ + print " * Extracting sections" + sections = OrderedDict() + + results = re.finditer(r"^([A-Z][A-Z]+)([ ]+.*)?$", data, re.M) + data_start = None + data_end = None + prev_section = None + cur_section = None + for res in results: + print " * Found", res.groups()[0] + data_end = res.start() + if prev_section is not None: + # Get rid of potential comments at the end of a line. + _data = re.sub(r"\s*#.*", "", data[data_start:data_end]) + sections[prev_section]["data"] = filter(None, _data.splitlines()) + data_start = res.end() + cur_section = res.groups()[0] + sections[cur_section] = {"arguments": res.groups()[1], "data": ""} + prev_section = "%s" % cur_section # Only to be sure we get a brand new string... 
+ + return sections + + +def update_tex_map(tex_map, tex_keys): + """Updates :tex_map with :tex_keys. Existing data in :tex_map is not + changed. + :tex_map: dictionary: Textures map to be updated. + :tex_keys: list of strings: Keys to be added to :tex_map. + Is considered to be a list of file names WITH its extension. + Returns updated :tex_map. + """ + for key in tex_keys: + if key not in tex_map: + tex_map[key] = {"alias": os.path.splitext(key)[0], "name": key} + return tex_map + + +def write_obj(output_file_name, obj_name, mtl_lib_file, tex_lines, + tex_map, n_verts, vertex_lines, n_normals, + normals_lines, n_faces, faces_groups): + """Builds the data and writes the .obj file. + :output_file_name: string: File path to be written. + :obj_name: string: The object name. + :mtl_lib_file: string: The .mtl file name. + :tex_lines: list of strings: The texture data. + :tex_map: dictionary: Contains texture alias/name data. + :n_verts: int: Number of vertex. + :vertex_lines: list of string: The vertex data. + :n_normals: int: Number of normals. + :normals_lines: list of strings: Normals data. + :n_faces: int: Number of faces. + :faces_groups: dictionary: Faces data groupped by texture index. + """ + + def _join(lns): + """Joins lines. + :lns: list of strings: Lines to join. + Returns joined lines as string. + """ + return "\n".join(lns) + + # Rebuild the faces data first. + faces = "" + for idx, lines in faces_groups.items(): + # Get the texture 'alias' or use a default value + tex_name = _get_tex_name(tex_map, idx) + faces += FACES_TEMPLATE.format(obj_name=obj_name, tex_name=tex_name, + faces_lines=_join(lines)) + + # 'Apply' data to the template. 
+ with open(output_file_name, "w") as fd_out: + fd_out.write(OBJ_TEMPLATE.format(header=COMMON_HEADER, + mtl_lib_file=mtl_lib_file, + obj_name=obj_name, + n_verts=n_verts, + n_faces=n_faces, + n_norms=n_normals, + vertex_lines=_join(vertex_lines), + tex_lines=_join(tex_lines), + norms_lines=_join(normals_lines), + faces_lines=faces)) + print " * Saved '%s'." % output_file_name + + +def write_mtl(output_file_name, tex_map): + """Builds the data and writes the .mtl file. + :output_file_name: string: File path to be written. + :tex_map: dict: Texture map to find texture file names. + """ + + def _build_entry(_tex_map, _idx="0"): + """Builds a .mtl file entry. + :_tex_map: dictionary: Map to look into. + :_idx: string: The index to look for. + Defaults to "0". + Returns string data.""" + return MATERIAL_ENTRY_TEMPLATE.format( + mtl_name=_get_tex_name(tex_map, _idx), + tex_file=tex_map.get(_idx, {}).get("name", ".")) + + materials = "" + mat_num = len(tex_map) + if mat_num: + for idx in sorted(tex_map.keys()): + materials += _build_entry(tex_map, idx) + else: + # Let define a default material when there's no map at all. + materials += _build_entry(tex_map) + + with open(output_file_name, "w") as fd_out: + fd_out.write(MATERIAL_TEMPLATE.format(header=COMMON_HEADER, + mat_num=mat_num, + materials=materials)) + print " * Saved '%s'." 
% output_file_name + + +def main(): + """Main function of the program.""" + print "=" * 78 + print "%s %s" % (__prog_name__, __version__) + debug, input_file_names = check_cli() + if not input_file_names: + _error("No input file name found!\n\n%s" % __help__) + for input_file_name in input_file_names: + print "* Reading", input_file_name + file_base_name = os.path.splitext(os.path.basename(input_file_name))[0] + file_dir_name = os.path.dirname(input_file_name) + sections = {} + tex_map = {} + with open(input_file_name, 'rU') as in_fd: + sections = get_sections(in_fd.read()) + + if debug: + write_dump_file(file_dir_name, file_base_name, "sec", + {"sections": sections}) + + if not sections: + _error("Nothing could be read from '%s'.\nIs this an Oolite .dat file?" \ + % input_file_name) + + # Magically call the 'check' functions + for name in sections.keys(): + f_name = "check_%s" % name.lower() + if f_name in globals().keys(): + if not globals()[f_name](sections): + _error("Number of entries in '%s' section is different as declared!" % name) + + def get_data(name, sections=sections): + """Returns the 'data' object from the 'name' one found in the + 'sections' one. + :sections: dictionary: Object returned by 'get_sections'. + :name: string: The name of the section to get the 'data'. + Returns a list of 'lines'. + """ + return sections.get(name, {}).get("data", []) + + oti_file_name = build_file_path(file_dir_name, file_base_name, "oti") + tex_map = parse_names(get_data("NAMES"), oti_file_name) + + tex_refs, tex_lines_out = parse_textures(get_data("TEXTURES")) + + if debug: + write_dump_file(file_dir_name, file_base_name, "tex", + {"tex_refs": tex_refs, + "tex_lines_out": tex_lines_out}) + + # Update the tex_map object if textures indexes and names are both + # used in 'TEXTURES'. 
+ if sorted(tex_map.keys()) != sorted(tex_refs.get("named").keys()): + tex_map = update_tex_map(tex_map, + set(tex_refs["named"].keys()).difference(tex_map.keys())) + + if debug: + write_dump_file(file_dir_name, file_base_name, "txm", + {"tex_map": tex_map}) + + n_verts, vertex_lines_out = parse_vertex(get_data("VERTEX")) + + if debug: + write_dump_file(file_dir_name, file_base_name, "ver", + {"n_verts": n_verts, + "vertex_lines_out": vertex_lines_out}) + + n_normals, normals_lines_out = parse_normals(get_data("NORMALS")) + + if debug: + write_dump_file(file_dir_name, file_base_name, "nor", + {"n_normals": n_normals, + "normals_lines_out": normals_lines_out}) + + n_faces, faces_groups = parse_faces(get_data("FACES"), tex_refs, + normals_lines_out) + + if debug: + write_dump_file(file_dir_name, file_base_name, "fac", + {"n_faces": n_faces, + "faces_groups": faces_groups}) + + output_file_name = build_file_path(file_dir_name, + file_base_name, 'obj') + material_file_name = build_file_path(file_dir_name, + file_base_name, 'mtl') + mtl_lib_file = os.path.basename(material_file_name) + + write_obj(output_file_name, file_base_name, mtl_lib_file, + tex_lines_out, tex_map, n_verts, vertex_lines_out, + n_normals, normals_lines_out, n_faces, faces_groups) + + write_mtl(material_file_name, tex_map) + + _exit("* Done") + + +#--------------------------------- BOOTSTRAP --------------------------------- +if __name__ == '__main__': + main() diff --git a/test/build_otis.py b/test/build_otis.py new file mode 100644 index 0000000..f9fe08b --- /dev/null +++ b/test/build_otis.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +# +# build_otis.py +# +# (C) D.C.-G. 
(LaChal) 2018 +# +# Build .oti files fo dat2obj.py +# +# Reads a given .plist file and extract the texture data needed by dat2obj.py +# Requires pbPlist v1.0.3 (https://pypi.python.org/pypi/pbPlist/1.0.3) +# +""" +Builds .oti files for dat2obj.py + +Usage: + +python build_oti.py [] + +""" +from __future__ import unicode_literals + +import os +import sys + +# Ensure the virtual environment is loaded if it exists. +# Deactivate pylint import order checks since we need to activate the virtual environment before +# importing pbPList. +# pylint: disable=wrong-import-position +__VENV = False +if os.path.isdir(".venv/lib/python2.7"): + if sys.platform == "win32": + ACTIVATE_THIS = os.path.abspath(".venv/Scripts/activate_this.py") + else: + ACTIVATE_THIS = os.path.abspath(".venv/bin/activate_this.py") + execfile(ACTIVATE_THIS, dict(__file__=ACTIVATE_THIS)) + __VENV = True + +import glob +from pbPlist import pbPlist + +# Ensure we can use dat2obj.py as a module. +sys.path.insert(1, "..") +from dat2obj import get_sections +# pylint: enable=wrong-import-position + + +def read_plist(f_path): + """Reads a plist file. + :f_path: string: path to the plist file to read. + Returns a pbPlist.PBPlist instance. + """ + assert os.path.isfile(f_path) + return pbPlist.PBPlist(f_path).root.value + + +def get_models_map(plist): + """Builds the models map from plist data. + :plist: dict: plist data to be scanned. + Returns a dict like: + {"model_file_name.dat": "model_entry_name_in_plist"} + """ + models = {} + for name, value in plist.items(): + if "model" in value.keys(): + models[value["model"]] = name + return models + + +def get_tex_aliases(f_name, tex_aliases=None): + """Find the texture names (aliases) in the given .dat file and update the tex_aliases dict. + :f_name: string: The file path to read. + :tex_aliases: dict: Dictionary to update. Defaults to an empty dict. + Returns the updated tex_aliases dict. 
+ """ + if tex_aliases is None: + tex_aliases = {} + with open(f_name, "rU") as f_in: + aliases = get_sections(unicode(f_in.read())).get("NAMES", {}).get("data") + if aliases: + tex_aliases[os.path.basename(f_name)] = aliases + return tex_aliases + + +def write_oti(name, directory, tex_names, plist): + """Writes the .oti file. + :name: string: The .dat file name to write the .oti for. + :directory: string: Where to write the .oti file. + :tex_names: list/tuple: List of the textures names (aliases). The order of elements defines in + which order the real textures names are written in the .oti file. + :plist: dict: Contains the 'model' data extracted from a .plist file. + Retruns nothing. + """ + lines = [] + for tex in tex_names: + lines.append(getattr(plist["materials"].get(tex, {}).get("diffuse_map", tex), "value", tex)) + f_name = os.path.join(directory, os.path.extsep.join((os.path.splitext(name)[0], "oti"))) + with open(f_name, "w") as f_out: + f_out.write("\n".join(lines)) + + +def main(args=None): + """Program bootstrap.""" + print "= Starting build_otis." + if not args: + args = sys.argv[1:] + if __VENV: + print " (Virtual environment in '.venv' activated.)" + if len(args) < 3: + print "! ERROR: Too many CLI arguments." + print "Put at least two positional arguments on CLI:" + print " " + sys.exit(1) + f_plist = args[0] + dat_dir = output_dir = args[1] + if len(args) == 3: + output_dir = args[2] + plist = read_plist(f_plist) + models = get_models_map(plist) + + # Scan the .dat files in the directory given as second CLI arg to get names order. + # Store them in a dict with file base name (without ext) as keys. + names = sorted(glob.glob(os.path.join(dat_dir, "*.dat"))) + tex_aliases = {} + for name in names: + print "* Finding texture aliases for '%s'." % name + tex_aliases = get_tex_aliases(name, tex_aliases) + + # Get the materials information from the .plist file. 
+ for name, aliases in tex_aliases.items(): + model = models.get(name) + if model: + print "* Writing .oti file for '%s'." % name + write_oti(name, output_dir, aliases, plist[model]) + + +if __name__ == '__main__': + main() diff --git a/test/test_dat2obj.bat b/test/test_dat2obj.bat new file mode 100644 index 0000000..82342aa --- /dev/null +++ b/test/test_dat2obj.bat @@ -0,0 +1,9 @@ +@echo off +REM - test_dat2obj.bat +REM - +REM - Script to test dat2obj.py +REM - +REM - D.C.-G. 2018 +REM - +python test_dat2obj.py %* + diff --git a/test/test_dat2obj.py b/test/test_dat2obj.py new file mode 100644 index 0000000..01998ae --- /dev/null +++ b/test/test_dat2obj.py @@ -0,0 +1,460 @@ +#!/bin/env python2 +# +# -*- encoding: utf-8 -*- +# +# test_dat2obj.py +# +# Multi-platform test file for dat2obj.py +# +# (c) D.C.-G. 2018 +# +# Not fitted for any commercial purpose! +# +r""" +This program tests if 'dat2obj.py' can convert Oolite mesh files to Wavefront .obj and .mtl files. + +Supported platforms +------------------- + +* Linux +* Windows + + +Prerequisites +------------- + +* Python 2.7.3 and over (Python 3 not yet supported by this program). + This program and 'dat2obj.py' **may** work with older versions of Python, but, no guaranty. +* An internet connection and your operating system being able to support TLS v1.2 protocol. + The Python package pbPlist is used during the tests, and is not a standard Python package. + It will be installed in a dedicated virtual environment using 'virtualenv' and 'pip'. + You don't need to worry about that, this program will download and install the needed packages + locally. Your system Python installation will not be modified. + See 'Python dependencies' below for details. +* Oolite installed in the default location for your system. + On Linux systems: '$HOME/GNUstep/Applications/Oolite/oolite.app'. + On Windows: 'C:\Oolite\oolite.app'. +* The program 'build_otis.py' in the same folder as this one. 
+    If it is not there, call Houston, and say you've a problem...
+
+
+Scenario
+--------
+
+<oolite> refers to the 'oolite.app' folder.
+File system paths are given for Linux platforms.
+
+1. Check and install dependencies. (See 'Python dependencies')
+2. Create the 'output_l', 'output_r' and 'cmp_output' directories.
+3. All the files starting with 'oolite' and ending with '.dat' found in
+   '<oolite>/Resources/Models' are copied in 'output_l'.
+4. Needed '.oti' files are generated using 'build_otis.py' program.
+5. All '.dat' files in 'output_l' are converted to '.obj' files in 'output_l'.
+6. The generated '.obj' files in 'output_l' are converted back to '.dat' ones in 'output_l'.
+7. Needed '.dat' and '.oti' files are copied to 'output_r'.
+8. Convert the '.dat' files in 'output_r' to '.obj' files in 'output_r'.
+9. Compare '.obj' files in 'output_l' and 'output_r' and store differences in 'cmp_output'.
+   Files in 'cmp_output' are generated only when differences are found.
+
+
+Python dependencies
+-------------------
+
+If the needed Python dependency 'pbPlist' for this test program is not found in your system Python
+installation, it is installed in a dedicated virtual environment in the current directory.
+The Python package 'virtualenv' version 15.1.0 is used.
+If it is not installed on your system Python installation, it will be downloaded and 'installed' only
+for the tests. It won't be available for other Python programs.
+
+The virtual environment directory '.venv' and the one containing 'virtualenv' '.virtualenv' can be
+removed safely once the tests are finished.
+"""
+from __future__ import unicode_literals
+
+import os
+import sys
+import glob
+import re
+import argparse
+import importlib
+import urllib2
+import shutil
+import difflib
+
+DEBUG = False
+
+
+class Options(object):  # pylint: disable=too-few-public-methods
+    """Stores options.
+    Options can be accessed using instance attributes or like a dict."""
+    def __init__(self, options):
+        """:options: dict: Options to store."""
+        super(Options, self).__setattr__("options", options)
+
+    def __getattribute__(self, key):
+        """Returns the self.options value for key, or the instance attribute value.
+        :key: string: The attribute name to return."""
+        options = super(Options, self).__getattribute__("options")
+        if key in options.keys():
+            return options[key]
+        return super(Options, self).__getattribute__(key)
+
+    def __setattr__(self, key, value):
+        """Set a value in self.options if not an instance attribute."""
+        options = super(Options, self).__getattribute__("options")
+        super_obj = super(Options, self)
+        if key not in dir(super_obj):
+            options[key] = value
+        else:
+            super_obj.__setattr__(key, value)
+
+    def __getitem__(self, key):
+        """Interface to get item like in a dict object."""
+        return self.options[key]
+
+    def __setitem__(self, key, value):
+        """Interface to set item like in a dict object."""
+        self.options[key] = value
+
+    def items(self):
+        """Returns internal dict items."""
+        return super(Options, self).__getattribute__("options").items()
+
+
+def _get_file_names(pattern, src):
+    """Finds all the file names corresponding to 'pattern' in 'src' directory.
+    :pattern: string: Pattern to be used with glob.glob.
+    :src: string: The directory to scan.
+    Returns a sorted list of strings."""
+    ## return sorted([os.path.join(src, a) for a in glob.glob(os.path.join(src, pattern))])
+    return sorted(glob.glob(os.path.join(src, pattern)))
+
+def _get_extractor(name):
+    """Returns an 'extractor' by loading the corresponding lower case 'name' module.
+    :name: string: The case sensitive name of the extractor object to get from the non-case
+        sensitive 'name' module.
+    This function is applicable only for modules like 'zipfile' and 'tarfile', because they contain
+    a 'ZipFile' and 'TarFile' object respectively.
+    Returns 'zipfile.ZipFile' or 'tarfile.open'."""
+    mod = importlib.import_module(name.lower())
+    extractor_cls = getattr(mod, name)
+    if hasattr(mod, "open"):
+        extractor_cls = getattr(mod, "open")
+    if not extractor_cls:
+        raise ImportError("Could not import '%s' from '%s'" % (name, mod))
+    return extractor_cls
+
+
+def _get_data(f_path):
+    """Reads a given file and returns its lines.
+    :f_path: string: Path to the file to read.
+    Returns a list of strings."""
+    with open(f_path, "U") as fin:
+        data = fin.readlines()
+    return data
+
+
+def _install_virtualenv(version, name=".virtualenv"):
+    """'Installs' virtualenv in the current directory.
+    Actually downloads the archive and unpacks it.
+    :version: string: Version to be installed.
+    :name: string: The name to rename the 'virtualenv-<version>' directory.
+        Defaults to '.virtualenv'."""
+    print "* Installing 'virtualenv' v%s locally." % version
+    base_url = "https://github.com/pypa/virtualenv/archive/%s" % version
+    if sys.platform == "win32":
+        ext = "zip"
+    else:
+        ext = "tar.gz"
+    result = urllib2.urlopen(".".join((base_url, ext)))
+
+    arch_name = os.path.abspath(".".join(("virtualenv", ext)))
+    with open(arch_name, "wb") as vfo:
+        vfo.write(result.read())
+        vfo.close()
+
+    # Select the right module/class to extract the data according to the extension.
+    extractor = _get_extractor({"tar.gz": "TarFile", "zip": "ZipFile"}[ext])
+    arch_obj = extractor(arch_name, mode="r")
+    arch_obj.extractall()
+    arch_obj.close()
+
+    # Rename the extracted archive, remove an existing folder before.
+    if os.path.isdir(name):
+        shutil.rmtree(name)
+    os.rename("virtualenv-%s" % version, name)
+
+
+def ensure_pbplist(opts):
+    """Ensures that the 'pbPlist' module is available.
+    If it is not found, it is installed using virtualenv in a dedicated virtual environment (.venv)
+    in the current directory."""
+    print "* Checking pbPlist."
+    try:
+        pb_plist = importlib.import_module("pbPlist")
+        del pb_plist
+        print "* Found!"
+ except ImportError: + # Find virtualenv. + print "! Not found. Installing..." + inst_virtualenv = False + vver = opts.virtualenv_version + try: + virtualenv = importlib.import_module("virtualenv") + ver = virtualenv.__version__ + if ver != vver: + print "! Wrong version for 'virtualenv': %s found, but %s wanted." % (ver, vver) + inst_virtualenv = True + except ImportError: + # Trigger the installation + print "! Can't find 'virtualenv' in sys.path." + inst_virtualenv = True + + if inst_virtualenv: + _install_virtualenv(vver) + virtualenv = importlib.import_module("virtualenv") + + # Create the virtual environment and activate it. + virtualenv.create_environment(".venv") + execfile(".venv/bin/activate_this.py", dict(__file__=".venv2/bin/activate_this.py")) + + # Install pbPlist + pip = importlib.import_module("pip") + pip.main(args=["install", "pbPlist", "--prefix", sys.prefix]) + print "* pbPlist installed." + + +def init_options(opts): + """Initializes options with default values according to the OS we're running on. + :opts: object: 'Options' instance to initialize. + Returns updated :opts.""" + if sys.platform == "win32": + parts = ("C:\\", "Oolite", "oolite.app") + else: + parts = (os.environ["HOME"], "GNUstep", "Applications", "Oolite", "oolite.app") + opts.oolite_app = oolite_app = os.path.join(*parts) + opts.models_dir = os.path.join(oolite_app, "Resources", "Models") + opts.plist_file = os.path.join(oolite_app, "Resources", "Config", "shipdata.plist") + opts.virtualenv_version = "15.1.0" + return opts + + +def parse_cli(opts): + """Parses the command line options and populate the given 'opts' object. + :opts: object: A 'Options' instance. + Returns updated :opts.""" + cwd = os.getcwd() + ## p_join = os.path.join + arg_parser = argparse.ArgumentParser() + add_arg = arg_parser.add_argument + add_arg("--models-dir", help="The directory where the models .dat files are. 
Defaults to the " \ + "'Models' directory in the 'Resources' one in Oolite default installation directory", + default=opts.models_dir) + add_arg("--plist-file", help="Read the given .plist file to find texture aliases. Defaults " \ + "the 'shipdata.plist' file in the 'Config' directory in the 'Resources' ones in " \ + "Oolite default installation directory.", + default=opts.plist_file) + add_arg("--left-dir", help="The conversion first pass directory to store files in. Defaults " \ + ## "to 'output_l' in the current directory.", default=p_join(cwd, "output_l")) + "to 'output_l' in the current directory.", default="output_l") + add_arg("--right-dir", help="The conversion second pass directory to store fines in. "\ + ## "Defaults to 'output_r' in the current directory.", default=p_join(cwd, "output_r")) + "Defaults to 'output_r' in the current directory.", default="output_r") + add_arg("--diff-dir", help="The directory to put the potential .diff files in. Defaults to " \ + ## "'cmp_output' in the current directory.", default=p_join(cwd, "cmp_output")) + "'cmp_output' in the current directory.", default="cmp_output") + add_arg("--debug", action="store_true", help="Print some debugging information.", default=False) + args = arg_parser.parse_args() + for key, val in dict(args._get_kwargs()).items(): # pylint: disable=protected-access + setattr(opts, key, val) + return opts + + +def create_dirs(*names): + """Create directories for each name in 'names'. + If a directory alerady exists, it is removed before being re-created. + :names: strings: The names of the directories to create.""" + for name in names: + if os.path.isdir(name): + print "* Removing '%s'." % name + shutil.rmtree(name) + print "* Creating '%s'." % name + os.mkdir(name) + + +def copy_files(pattern, src, dst): + """Copy files corresponding to 'pattern' from 'src' directory to 'dst' one. + :pattern: string: A usable pattern for glob.glob. + :src: string: The directory to copy from. 
+    :dst: string: The directory to copy to. Must exist."""
+    print "* Copying '%s' files from '%s' to '%s'." % (pattern, src, dst)
+    names = _get_file_names(pattern, src)
+    for name in names:
+        shutil.copy2(name, dst)
+
+
+def build_otis(plist_file, dat_dir, output_dir):
+    """Builds '.oti' files using the 'build_otis.py' program.
+    :plist_file: string: The path to the '.plist' file containing materials information.
+    :dat_dir: string: The directory where the '.dat' files to scan are.
+    :output_dir: string: The directory where to write the '.oti' files. Must exist."""
+    if globals()['DEBUG']:
+        print "% DEBUG: build_otis::"
+        print "%   plist_file:", plist_file
+        print "%   dat_dir:", dat_dir
+        print "%   output_dir:", output_dir
+    oti_builder = importlib.import_module("build_otis")
+    oti_builder.main(args=(plist_file, dat_dir, output_dir))
+
+
+def convert_dat(src):
+    """Converts '.dat' files by calling 'dat2obj.py'.
+    :src: string: The directory where the '.dat' files are and where the '.obj' and '.mtl' ones will
+        be written.
+    Returns the number of files for which the conversion failed."""
+    failures = 0
+    for name in _get_file_names("*.dat", src):
+        if globals()['DEBUG']:
+            print "% DEBUG: convert_dat::"
+            print "%   src:", src
+            print "%   name:", name
+        failures += min(1, os.system("python %s %s" % (os.path.join("..", "dat2obj.py"), name)))
+    return failures
+
+
+def convert_obj(src):
+    """Convert '.obj' files using 'Obj2DatTex.py' or 'Obj2DatTexNorm.py'.
+    The conversion program is automatically selected according to the normals data contained in the
+    '.obj' files.
+    :src: string: The directory where the '.obj' files are.
+    Returns the number of files for which the conversion failed."""
+    failures = 0
+    cwd = os.getcwd()
+    sep = os.path.sep
+    for name in _get_file_names("*.obj", src):
+        if globals()['DEBUG']:
+            print "% DEBUG: name (1):", name
+        # We have to 'relativize' the file path, since Obj2DatTex.py calls 'lower()' method on the
+        # file path, which is not compatible with case sensitive file systems...
+        name = re.sub("^%s" % re.escape(r"%s%s" % (cwd, sep)), "", name)
+        if globals()['DEBUG']:
+            print "% DEBUG: name (2):", name
+        prog = "Obj2DatTex.py"
+        with open(name) as fin:
+            if re.findall(r"^vn\s", fin.read(), re.M):
+                prog = "Obj2DatTexNorm.py"
+        # We need to convert Windows path separators to Unix for Obj2DatTex.py...
+        if sys.platform == "win32" and prog == "Obj2DatTex.py":
+            name = name.replace(os.sep, os.altsep)
+        print "* Calling '%s'." % prog
+        failures += min(1, os.system("python %s %s" % (os.path.join("..", prog), name)))
+    return failures
+
+
+def diff_files(left, right, output, pattern):
+    """Find differences between two sets of files filtered using a pattern. Differences are saved in
+    individual files.
+    :left: string: The 'left' folder to get the first set of files.
+    :right: string: The 'right' folder to get the second set of files.
+    :output: string: The folder to write the '.diff' files in. Must exist.
+    :pattern: string: A pattern compatible with glob.glob like '*.foo'.
+    Returns the number of different files."""
+    result = 0
+    file_names = sorted(glob.glob(os.path.join(left, pattern)))
+
+    print "* Comparing %s '%s' files." % (len(file_names), pattern)
+
+    b_name = os.path.basename
+
+    for file_name in file_names:
+        right_name = os.path.join(right, b_name(file_name))
+        diff = tuple(difflib.unified_diff(_get_data(file_name), _get_data(right_name),
+                                          file_name, right_name))
+
+        print "  * '%s' ('%s' | '%s')" % (b_name(file_name), b_name(left), b_name(right)),
+
+        if diff:
+            result += 1
+            print "FAILED"
+            # Write the diff to the output file.
+            o_name = os.path.join(output, os.extsep.join((os.path.splitext(b_name(file_name))[0],
+                                                          "diff")))
+            with open(o_name, "w") as fout:
+                fout.writelines(diff)
+        else:
+            print "OK"
+
+    return result
+
+
+def main():
+    """Program bootstrap."""
+    print "= Starting test_dat2obj.py"
+    # -----------------
+    # Build the options
+    # Instantiate the options object
+    opts = Options({})
+    # Populate it with default values.
+    init_options(opts)
+
+    # Parse the command line and update opts accordingly.
+    parse_cli(opts)
+    if opts.debug:
+        globals()['DEBUG'] = True
+        print "% DEBUG: options"
+        for key, value in opts.options.items():
+            print "%%  %s: %s" % (key, value)
+
+    # -------------
+    # Check pbPlist
+    # If not found, it is automatically installed using a virtual environment.
+    # If virtualenv is not found, it is 'installed' for the program only.
+    ensure_pbplist(opts)
+
+    # ----------------------
+    # Start the test process
+
+    # Create the directories.
+    create_dirs(opts.left_dir, opts.right_dir, opts.diff_dir)
+
+    # Copy the needed .dat files to left directory.
+    copy_files("oolite*.dat", opts.models_dir, opts.left_dir)
+
+    # Build .oti files.
+    build_otis(opts.plist_file, opts.left_dir, opts.left_dir)
+
+    # Convert .dat to .obj (1/2).
+    fail_to_obj1 = convert_dat(opts.left_dir)
+    if fail_to_obj1:
+        print "Failed to convert %s '.dat' file to '.obj' ones (pass 1)." % fail_to_obj1
+
+    # Convert back .obj files to .dat ones.
+    fail_to_dat = convert_obj(opts.left_dir)
+    # NOTE(review): this condition looks like a copy-paste slip -- it should probably
+    # test 'fail_to_dat' (the value reported below), not 'fail_to_obj1'.
+    if fail_to_obj1:
+        print "Failed to convert %s '.obj' file to '.dat' ones." % fail_to_dat
+
+    # Copy needed .dat and .oti files from left to right directory.
+    copy_files("*.dat", opts.left_dir, opts.right_dir)
+    copy_files("*.oti", opts.left_dir, opts.right_dir)
+
+    # Convert new .dat files to .obj ones (2/2).
+    fail_to_obj2 = convert_dat(opts.right_dir)
+    # NOTE(review): same copy-paste slip -- 'fail_to_obj2' is the value this branch reports.
+    if fail_to_obj1:
+        print "Failed to convert %s '.dat' file to '.obj' ones (pass 2)."
% fail_to_obj2 + + if fail_to_obj1 != fail_to_obj2: + print "Second dat2obj pass did not gave the same converted file number:" + print "First: %s, second: %s." % (fail_to_obj1, fail_to_obj2) + + # Compare converted files. + failed_diffs = diff_files(opts.left_dir, opts.right_dir, opts.diff_dir, "*.obj") + if failed_diffs: + print "Comparison did not pass for %s .obj files." % failed_diffs + print "See '.diff' file in '%s'." % opts.diff_dir + print "Test failed!" + else: + print "Tests successful!" + + +if __name__ == "__main__": + main() diff --git a/test/test_dat2obj.sh b/test/test_dat2obj.sh new file mode 100755 index 0000000..67fd8f9 --- /dev/null +++ b/test/test_dat2obj.sh @@ -0,0 +1,8 @@ +#!/bin/bash +# +# test_dat2obj.sh +# +# Script to test dat2obj.py +# +python test_dat2obj.py $@ +