1 """Spells for optimizing nif files.
2
3 .. autoclass:: SpellCleanRefLists
4 :show-inheritance:
5 :members:
6
7 .. autoclass:: SpellMergeDuplicates
8 :show-inheritance:
9 :members:
10
11 .. autoclass:: SpellOptimizeGeometry
12 :show-inheritance:
13 :members:
14
15 .. autoclass:: SpellOptimize
16 :show-inheritance:
17 :members:
18
19 .. autoclass:: SpellDelUnusedBones
20 :show-inheritance:
21 :members:
22
23 """
24
25 # --------------------------------------------------------------------------
26 # ***** BEGIN LICENSE BLOCK *****
27 #
28 # Copyright (c) 2007-2011, NIF File Format Library and Tools.
29 # All rights reserved.
30 #
31 # Redistribution and use in source and binary forms, with or without
32 # modification, are permitted provided that the following conditions
33 # are met:
34 #
35 # * Redistributions of source code must retain the above copyright
36 # notice, this list of conditions and the following disclaimer.
37 #
38 # * Redistributions in binary form must reproduce the above
39 # copyright notice, this list of conditions and the following
40 # disclaimer in the documentation and/or other materials provided
41 # with the distribution.
42 #
43 # * Neither the name of the NIF File Format Library and Tools
44 # project nor the names of its contributors may be used to endorse
45 # or promote products derived from this software without specific
46 # prior written permission.
47 #
48 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
49 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
50 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
51 # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
52 # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
53 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
54 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
55 # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
56 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
57 # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
58 # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
59 # POSSIBILITY OF SUCH DAMAGE.
60 #
61 # ***** END LICENSE BLOCK *****
62 # --------------------------------------------------------------------------
63
64 from itertools import izip
65 import os.path # exists
66
67 from pyffi.formats.nif import NifFormat
68 from pyffi.utils import unique_map
69 import pyffi.utils.tristrip
70 import pyffi.utils.vertex_cache
71 import pyffi.spells
72 import pyffi.spells.nif
73 import pyffi.spells.nif.fix
74 import pyffi.spells.nif.modify
75
76 # localization
77 #import gettext
78 #_ = gettext.translation('pyffi').ugettext
79 _ = lambda msg: msg # stub, for now
80
81 # set flag to overwrite files
82 __readonly__ = False
83
84 # example usage
85 __examples__ = """* Standard usage:
86
87 python niftoaster.py optimize /path/to/copy/of/my/nifs
88
89 * Optimize, but do not merge NiMaterialProperty blocks:
90
91 python niftoaster.py optimize --exclude=NiMaterialProperty /path/to/copy/of/my/nifs
92 """
95 """Remove empty and duplicate entries in reference lists."""
96
97 SPELLNAME = "opt_cleanreflists"
98 READONLY = False
  99 
 100     def datainspect(self):
101 # see MadCat221's metstaff.nif:
102 # merging data on PSysMeshEmitter affects particle system
103 # so do not merge child links on this nif (probably we could still
104 # merge other things: this is just a quick hack to make sure the
105 # optimizer won't do anything wrong)
106 try:
107 if self.data.header.has_block_type(NifFormat.NiPSysMeshEmitter):
108 return False
109 except ValueError:
110 # when in doubt, assume it does not have this block
111 pass
112 # so far, only reference lists in NiObjectNET blocks, NiAVObject
113 # blocks, and NiNode blocks are checked
114 return self.inspectblocktype(NifFormat.NiObjectNET)
115
119
123
125 """Return a cleaned copy of the given list of references."""
126 # delete empty and duplicate references
127 cleanlist = []
128 for ref in reflist:
129 if ref is None:
130 self.toaster.msg("removing empty %s reference" % category)
131 self.changed = True
132 elif ref in cleanlist:
133 self.toaster.msg("removing duplicate %s reference" % category)
134 self.changed = True
135 else:
136 cleanlist.append(ref)
137 # done
138 return cleanlist
 139 
 140     def branchentry(self, branch):
141 if isinstance(branch, NifFormat.NiObjectNET):
142 # clean extra data
143 branch.set_extra_datas(
144 self.cleanreflist(branch.get_extra_datas(), "extra"))
145 if isinstance(branch, NifFormat.NiAVObject):
146 # clean properties
147 branch.set_properties(
148 self.cleanreflist(branch.get_properties(), "property"))
149 if isinstance(branch, NifFormat.NiNode):
150 # clean children
151 branch.set_children(
152 self.cleanreflist(branch.get_children(), "child"))
153 # clean effects
154 branch.set_effects(
155 self.cleanreflist(branch.get_effects(), "effect"))
156 # always recurse further
157 return True
 158 
 159 class SpellMergeDuplicates(pyffi.spells.nif.NifSpell):
160 """Remove duplicate branches."""
161
162 SPELLNAME = "opt_mergeduplicates"
163 READONLY = False
 164 
 165     def __init__(self, *args, **kwargs):
166 pyffi.spells.nif.NifSpell.__init__(self, *args, **kwargs)
167 # list of all branches visited so far
168 self.branches = []
 169 
 170     def datainspect(self):
171 # see MadCat221's metstaff.nif:
172 # merging data on PSysMeshEmitter affects particle system
173 # so do not merge shapes on this nif (probably we could still
174 # merge other things: this is just a quick hack to make sure the
175 # optimizer won't do anything wrong)
176 try:
177 return not self.data.header.has_block_type(
178 NifFormat.NiPSysMeshEmitter)
179 except ValueError:
180 # when in doubt, do the spell
181 return True
 182 
 183     def branchinspect(self, branch):
184 # only inspect the NiObjectNET branch (merging havok can mess up things)
185 return isinstance(branch, (NifFormat.NiObjectNET,
186 NifFormat.NiGeometryData))
 187 
 188     def branchentry(self, branch):
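    # compare against every branch visited so far: if an equivalent
    # (interchangeable) branch exists, all references to this one are
    # redirected to it via replace_global_node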
189 for otherbranch in self.branches:
190 if (branch is not otherbranch and
191 branch.is_interchangeable(otherbranch)):
192 # skip properties that have controllers (the
193 # controller data cannot always be reliably checked,
194 # see also issue #2106668)
195 if (isinstance(branch, NifFormat.NiProperty)
196 and branch.controller):
197 continue
198 # skip BSShaderProperty blocks (see niftools issue #3009832)
199 if isinstance(branch, NifFormat.BSShaderProperty):
200 continue
201 # interchangeable branch found!
202 self.toaster.msg("removing duplicate branch")
203 self.data.replace_global_node(branch, otherbranch)
204 self.changed = True
205 # branch has been replaced, so no need to recurse further
206 return False
207 else:
208 # no duplicate found, add to list of visited branches
209 self.branches.append(branch)
210 # continue recursion
211 return True
 212 
 213 class SpellOptimizeGeometry(pyffi.spells.nif.NifSpell):
214 """Optimize all geometries:
215 - remove duplicate vertices
216 - triangulate
217 - recalculate skin partition
218 - recalculate tangent space
219 """
220
221 SPELLNAME = "opt_geometry"
222 READONLY = False
223
224 # spell parameters
225 VERTEXPRECISION = 3
226 NORMALPRECISION = 3
227 UVPRECISION = 5
228 VCOLPRECISION = 3
 229 
 230     def __init__(self, *args, **kwargs):
231 pyffi.spells.nif.NifSpell.__init__(self, *args, **kwargs)
232 # list of all optimized geometries so far
233 # (to avoid optimizing the same geometry twice)
234 self.optimized = []
235
237 # do not optimize if an egm or tri file is detected
238 filename = self.stream.name
239 if (os.path.exists(filename[:-3] + "egm")
240 or os.path.exists(filename[:-3] + "tri")):
241 return False
242 # so far, only reference lists in NiObjectNET blocks, NiAVObject
243 # blocks, and NiNode blocks are checked
244 return self.inspectblocktype(NifFormat.NiTriBasedGeom)
245
 249 
 250     def optimize_vertices(self, data):
251 self.toaster.msg("removing duplicate vertices")
252 # get map, deleting unused vertices
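    # unique_map yields (v_map, v_map_inverse): v_map[old_index] is the new
    # index of each original vertex (duplicates collapse onto a single index),
    # and v_map_inverse[new_index] is a representative old index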
253 return unique_map(
254 vhash
255 for i, vhash in enumerate(data.get_vertex_hash_generator(
256 vertexprecision=self.VERTEXPRECISION,
257 normalprecision=self.NORMALPRECISION,
258 uvprecision=self.UVPRECISION,
259 vcolprecision=self.VCOLPRECISION)))
 260 
 261     def branchentry(self, branch):
262 """Optimize a NiTriStrips or NiTriShape block:
263 - remove duplicate vertices
264 - retriangulate for vertex cache
265 - recalculate skin partition
266 - recalculate tangent space
267
268 @todo: Limit the length of strips (see operation optimization mod for
269 Oblivion!)
270 """
271 if not isinstance(branch, NifFormat.NiTriBasedGeom):
272 # keep recursing
273 return True
274
275 if branch in self.optimized:
276 # already optimized
277 return False
278
279 if branch.data.additional_data:
280 # occurs in fallout nv
281 # not sure how to deal with additional data
282 # so skipping to be on the safe side
283 self.toaster.msg(
284 "mesh has additional geometry data"
285 " which is not well understood: not optimizing")
286 return False
287
288 # we found a geometry to optimize
289
290 # we're going to change the data
291 self.changed = True
292
293 # cover degenerate case
294 if branch.data.num_vertices < 3 or branch.data.num_triangles == 0:
295 self.toaster.msg(
296 "less than 3 vertices or no triangles: removing branch")
297 self.data.replace_global_node(branch, None)
298 return False
299
300 # shortcut
301 data = branch.data
302
303 v_map, v_map_inverse = self.optimize_vertices(data)
304
305 new_numvertices = len(v_map_inverse)
306 self.toaster.msg("(num vertices was %i and is now %i)"
307 % (len(v_map), new_numvertices))
308
309 # optimizing triangle ordering
310 # first, get new triangle indices, with duplicate vertices removed
311 triangles = list(pyffi.utils.vertex_cache.get_unique_triangles(
312 (v_map[v0], v_map[v1], v_map[v2])
313 for v0, v1, v2 in data.get_triangles()))
314 old_atvr = pyffi.utils.vertex_cache.average_transform_to_vertex_ratio(
315 triangles)
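    # ATVR = average number of vertex transforms per triangle; lower means
    # better post-transform cache reuse, so the reordered triangles are only
    # kept when they actually lower it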
316 self.toaster.msg("optimizing triangle ordering")
317 new_triangles = pyffi.utils.vertex_cache.get_cache_optimized_triangles(
318 triangles)
319 new_atvr = pyffi.utils.vertex_cache.average_transform_to_vertex_ratio(
320 new_triangles)
321 if new_atvr < old_atvr:
322 triangles = new_triangles
323 self.toaster.msg(
324 "(ATVR reduced from %.3f to %.3f)" % (old_atvr, new_atvr))
325 else:
326 self.toaster.msg(
327 "(ATVR stable at %.3f)" % old_atvr)
328 # optimize triangles to have sequentially ordered indices
329 self.toaster.msg("optimizing vertex ordering")
330 v_map_opt = pyffi.utils.vertex_cache.get_cache_optimized_vertex_map(
331 triangles)
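    # v_map_opt renumbers vertices in the order the optimized triangle list
    # first uses them, so vertex fetches become roughly sequential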
332 triangles = [(v_map_opt[v0], v_map_opt[v1], v_map_opt[v2])
333 for v0, v1, v2 in triangles]
334 # update vertex map and its inverse
335 for i in xrange(data.num_vertices):
336 try:
337 v_map[i] = v_map_opt[v_map[i]]
338 except IndexError:
339 # found a trailing vertex which is not used
340 v_map[i] = None
341 if v_map[i] is not None:
342 v_map_inverse[v_map[i]] = i
343 else:
344 self.toaster.logger.warn("unused vertex")
345 try:
346 new_numvertices = max(v for v in v_map if v is not None) + 1
347 except ValueError:
348 # max() arg is an empty sequence
349 # this means that there are no vertices
350 self.toaster.msg(
351 "less than 3 vertices or no triangles: removing branch")
352 self.data.replace_global_node(branch, None)
353 return False
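    # trim the inverse map: entries beyond new_numvertices belong to vertices
    # that no triangle references anymore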
354 del v_map_inverse[new_numvertices:]
355
356 # use a triangle representation
357 if not isinstance(branch, NifFormat.NiTriShape):
358 self.toaster.msg("replacing branch by NiTriShape")
359 newbranch = branch.get_interchangeable_tri_shape(
360 triangles=triangles)
361 self.data.replace_global_node(branch, newbranch)
362 branch = newbranch
363 data = newbranch.data
364 else:
365 data.set_triangles(triangles)
366
367 # copy old data
368 oldverts = [[v.x, v.y, v.z] for v in data.vertices]
369 oldnorms = [[n.x, n.y, n.z] for n in data.normals]
370 olduvs = [[[uv.u, uv.v] for uv in uvset] for uvset in data.uv_sets]
371 oldvcols = [[c.r, c.g, c.b, c.a] for c in data.vertex_colors]
372 if branch.skin_instance: # for later
373 oldweights = branch.get_vertex_weights()
374 # set new data
375 data.num_vertices = new_numvertices
376 if data.has_vertices:
377 data.vertices.update_size()
378 for i, v in enumerate(data.vertices):
379 old_i = v_map_inverse[i]
380 v.x = oldverts[old_i][0]
381 v.y = oldverts[old_i][1]
382 v.z = oldverts[old_i][2]
383 if data.has_normals:
384 data.normals.update_size()
385 for i, n in enumerate(data.normals):
386 old_i = v_map_inverse[i]
387 n.x = oldnorms[old_i][0]
388 n.y = oldnorms[old_i][1]
389 n.z = oldnorms[old_i][2]
390 # XXX todo: if ...has_uv_sets...:
391 data.uv_sets.update_size()
392 for j, uvset in enumerate(data.uv_sets):
393 for i, uv in enumerate(uvset):
394 old_i = v_map_inverse[i]
395 uv.u = olduvs[j][old_i][0]
396 uv.v = olduvs[j][old_i][1]
397 if data.has_vertex_colors:
398 data.vertex_colors.update_size()
399 for i, c in enumerate(data.vertex_colors):
400 old_i = v_map_inverse[i]
401 c.r = oldvcols[old_i][0]
402 c.g = oldvcols[old_i][1]
403 c.b = oldvcols[old_i][2]
404 c.a = oldvcols[old_i][3]
405 del oldverts
406 del oldnorms
407 del olduvs
408 del oldvcols
409
410 # update skin data
411 if branch.skin_instance:
412 self.toaster.msg("update skin data vertex mapping")
413 skindata = branch.skin_instance.data
414 newweights = []
415 for i in xrange(new_numvertices):
416 newweights.append(oldweights[v_map_inverse[i]])
417 for bonenum, bonedata in enumerate(skindata.bone_list):
418 w = []
419 for i, weightlist in enumerate(newweights):
420 for bonenum_i, weight_i in weightlist:
421 if bonenum == bonenum_i:
422 w.append((i, weight_i))
423 bonedata.num_vertices = len(w)
424 bonedata.vertex_weights.update_size()
425 for j, (i, weight_i) in enumerate(w):
426 bonedata.vertex_weights[j].index = i
427 bonedata.vertex_weights[j].weight = weight_i
428
429 # update skin partition (only if branch already exists)
430 if branch.get_skin_partition():
431 self.toaster.msg("updating skin partition")
432 if isinstance(branch.skin_instance,
433 NifFormat.BSDismemberSkinInstance):
434 # get body part indices (in the old system!)
435 triangles, trianglepartmap = (
436 branch.skin_instance.get_dismember_partitions())
437 maximize_bone_sharing = True
438 # update mapping
439 new_triangles = []
440 new_trianglepartmap = []
441 for triangle, trianglepart in izip(triangles, trianglepartmap):
442 new_triangle = tuple(v_map[i] for i in triangle)
443 # it could happen that v_map[i] is None
444 # these triangles are skipped
445 # see for instance
446 # falloutnv/meshes/armor/greatkhans/greatkhan_v3.nif
447 # falloutnv/meshes/armor/tunnelsnake01/m/outfitm.nif
448 if None not in new_triangle:
449 new_triangles.append(new_triangle)
450 new_trianglepartmap.append(trianglepart)
451 triangles = new_triangles
452 trianglepartmap = new_trianglepartmap
453 else:
454 # no body parts
455 triangles = None
456 trianglepartmap = None
457 maximize_bone_sharing = False
458 # use Oblivion settings
459 branch.update_skin_partition(
460 maxbonesperpartition=18, maxbonespervertex=4,
461 stripify=False, verbose=0,
462 triangles=triangles, trianglepartmap=trianglepartmap,
463 maximize_bone_sharing=maximize_bone_sharing)
464
465 # update morph data
466 for morphctrl in branch.get_controllers():
467 if isinstance(morphctrl, NifFormat.NiGeomMorpherController):
468 morphdata = morphctrl.data
469 # skip empty morph data
470 if not morphdata:
471 continue
472 # convert morphs
473 self.toaster.msg("updating morphs")
474 # check size and fix it if needed
475 # (see issue #3395484 reported by rlibiez)
476 # remap of morph vertices works only if
477 # morph.num_vertices == len(v_map)
478 if morphdata.num_vertices != len(v_map):
479 self.toaster.logger.warn(
480 "number of vertices in morph ({0}) does not match"
481 " number of vertices in shape ({1}):"
482 " resizing morph, graphical glitches might result"
483 .format(morphdata.num_vertices, len(v_map)))
484 morphdata.num_vertices = len(v_map)
485 for morph in morphdata.morphs:
486 morph.arg = morphdata.num_vertices # manual argument passing
487 morph.vectors.update_size()
488 # now remap morph vertices
489 for morph in morphdata.morphs:
490 # store a copy of the old vectors
491 oldmorphvectors = [(vec.x, vec.y, vec.z)
492 for vec in morph.vectors]
493 for old_i, vec in izip(v_map_inverse, morph.vectors):
494 vec.x = oldmorphvectors[old_i][0]
495 vec.y = oldmorphvectors[old_i][1]
496 vec.z = oldmorphvectors[old_i][2]
497 del oldmorphvectors
498 # resize matrices
499 morphdata.num_vertices = new_numvertices
500 for morph in morphdata.morphs:
501 morph.arg = morphdata.num_vertices # manual argument passing
502 morph.vectors.update_size()
503
504 # recalculate tangent space (only if the branch already exists)
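    # 61440 == 0xF000: the high bits of num_uv_sets / bs_num_uv_sets are the
    # flag Oblivion-era nifs use to signal embedded tangent space data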
505 if (branch.find(block_name='Tangent space (binormal & tangent vectors)',
506 block_type=NifFormat.NiBinaryExtraData)
507 or (data.num_uv_sets & 61440)
508 or (data.bs_num_uv_sets & 61440)):
509 self.toaster.msg("recalculating tangent space")
510 branch.update_tangent_space()
511
512 # stop recursion
513 return False
514
517 """Optimize geometry by splitting large models into pieces.
518 (This spell is not yet fully implemented!)
519 """
520 SPELLNAME = "opt_split"
521 READONLY = False
522 THRESHOLD_RADIUS = 100 #: Threshold where to split geometry.
523
524 # XXX todo
 525 @staticmethod
 526 def addVertex(sourceindex, v_map, sourcedata, destdata):
527 """Add a vertex from source to destination. Returns index in
528 destdata of the vertex."""
529 # v_map maps source indices that have already been added to the
530 # index they already have in the destdata
531
532 # has_normals, num_uv_sets, etc. of destdata must already match
533 # the sourcedata
534 try:
535 return v_map[sourceindex]
536 except KeyError:
537 v_map[sourceindex] = destdata.num_vertices
538 destdata.num_vertices += 1
539 destdata.vertices.update_size()
540 destdata.vertices[-1].x = sourcedata.vertices[sourceindex].x
541 destdata.vertices[-1].y = sourcedata.vertices[sourceindex].y
542 destdata.vertices[-1].z = sourcedata.vertices[sourceindex].z
543 if sourcedata.has_normals:
544 destdata.normals.update_size()
545 destdata.normals[-1].x = sourcedata.normals[sourceindex].x
546 destdata.normals[-1].y = sourcedata.normals[sourceindex].y
547 destdata.normals[-1].z = sourcedata.normals[sourceindex].z
548 if sourcedata.has_vertex_colors:
549 destdata.vertex_colors.update_size()
550 destdata.vertex_colors[-1].r = sourcedata.vertex_colors[sourceindex].r
551 destdata.vertex_colors[-1].g = sourcedata.vertex_colors[sourceindex].g
552 destdata.vertex_colors[-1].b = sourcedata.vertex_colors[sourceindex].b
553 destdata.vertex_colors[-1].a = sourcedata.vertex_colors[sourceindex].a
554 if sourcedata.has_uv:
555 for sourceuvset, destuvset in izip(sourcedata.uv_sets, destdata.uv_sets):
556 destuvset.update_size()
557 destuvset[-1].u = sourceuvset[sourceindex].u
558 destuvset[-1].v = sourceuvset[sourceindex].v
559 return destdata.num_vertices
560
561 # XXX todo
 562 @staticmethod
 563 def addTriangle(sourcetriangle, v_map, sourcedata, destdata):
564 """Add a triangle from source to destination."""
565 desttriangle = [
 566 addVertex(sourceindex, v_map, sourcedata, destdata)
567 for sourceindex in sourcetriangle]
568 destdata.num_triangles += 1
569 destdata.triangles.update_size()
570 destdata.triangles[-1].v_1 = desttriangle[0]
 571 destdata.triangles[-1].v_2 = desttriangle[1]
 572 destdata.triangles[-1].v_3 = desttriangle[2]
573
574 # XXX todo
 575 @staticmethod
 576 def get_size(vertices, triangle):
577 """Calculate size of geometry data + given triangle."""
578 def helper(oper, coord):
 579 return oper(oper(getattr(vert, coord) for vert in triangle),
 580 oper(getattr(vert, coord) for vert in vertices))
581 minx = helper(min, "x")
582 miny = helper(min, "y")
583 minz = helper(min, "z")
584 maxx = helper(max, "x")
585 maxy = helper(max, "y")
586 maxz = helper(max, "z")
587 return max((maxx - minx, maxy - miny, maxz - minz))
588
589 # XXX todo: merge into branchentry spell
 590 @staticmethod
 591 def split(geom, threshold_radius):
592 """Takes a NiGeometry block and splits the geometries. Returns a NiNode
593 which contains the splitted geometry. Note that everything is triangulated
594 in the process."""
595 # make list of triangles
596 # this will be used as the list of triangles still to add
597 triangles = geom.data.get_triangles()
598 node = NifFormat.NiNode().deepcopy(
599 NifFormat.NiAVObject.deepcopy(geom))
600 geomsplit = None
601 # while there are still triangles to add...
602 while triangles:
603 if geomsplit is None:
604 # split new geometry
605 geomsplit = NifFormat.NiTriShape()
606 node.add_child(geomsplit)
607 geomsplit.data = NifFormat.NiTriShapeData()
608 v_map = {}
609 # copy relevant data
610 geomsplit.name = "%s:%i" % (geom.name, node.num_children - 1)
611 geomsplit.data.has_vertices = geom.data.has_vertices
612 geomsplit.data.has_normals = geom.data.has_normals
613 geomsplit.data.has_vertex_colors = geom.data.has_vertex_colors
614 geomsplit.data.num_uv_sets = geom.data.num_uv_sets
615 geomsplit.data.has_uv = geom.data.has_uv
616 geomsplit.data.uv_sets.update_size()
617 # assign it a random triangle
618 triangle = triangles.pop(0)
619 addTriangle(triangle, v_map, geom.data, geomsplit.data)
620 # find face that is close to current geometry
621 for triangle in triangles:
622 if get_size(geomsplit.data,
623 tuple(geom.data.vertices[index]
624 for index in triangle)) < threshold_radius:
625 addTriangle(triangle, v_map, geom.data, geomsplit.data)
626 break
627 else:
628 # if exceeded, start new geometry
629 # first finish some things in geomsplit data
630 geomsplit.data.update_center_radius()
631 # setting geomsplit to None flags this for
632 # the next iteration
633 geomsplit = None
634 # return grouping node
635 return node
 636 
 637     def __init__(self, *args, **kwargs):
638 pyffi.spells.nif.NifSpell.__init__(self, *args, **kwargs)
639 # list of all optimized geometries so far
640 # (to avoid optimizing the same geometry twice)
641 self.optimized = []
642
645
 648 
 649     def branchentry(self, branch):
650 if not isinstance(branch, NifFormat.NiTriBasedGeom):
651 # keep recursing
652 return True
653
654 if branch in self.optimized:
655 # already optimized
656 return False
657
658 # we found a geometry to optimize
659 # XXX todo
660 # get geometry data
 661 geomdata = branch.data
 662 if not geomdata:
 663 self.optimized.append(branch)
 664 return False
 665 # check radius
 666 if geomdata.radius < self.THRESHOLD_RADIUS:
 667 self.optimized.append(branch)
 668 return False
 669 # radius is over the threshold, so re-organize the geometry
 670 newbranch = self.split(branch, threshold_radius=self.THRESHOLD_RADIUS)
 671 # replace branch with newbranch everywhere
 672 self.data.replace_global_node(branch, newbranch)
 673 
 674 self.optimized.append(branch)
675
676 # stop recursing
677 return False
 678 
 679 class SpellDelUnusedBones(pyffi.spells.nif.NifSpell):
680 """Remove nodes that are not used for anything."""
681
682 SPELLNAME = "opt_delunusedbones"
683 READONLY = False
 684 
 685     def datainspect(self):
686 # only run the spell if there are skinned geometries
687 return self.inspectblocktype(NifFormat.NiSkinInstance)
 688 
 689     def dataentry(self):
690 # make list of used bones
691 self._used_bones = set()
692 for branch in self.data.get_global_iterator():
693 if isinstance(branch, NifFormat.NiGeometry):
694 if branch.skin_instance:
695 self._used_bones |= set(branch.skin_instance.bones)
696 return True
697
 701 
 702     def branchentry(self, branch):
703 if isinstance(branch, NifFormat.NiNode):
704 if ((not branch.children)
705 and (not branch.collision_object)
706 and (branch not in self._used_bones)):
707 self.toaster.msg("removing unreferenced bone")
708 self.data.replace_global_node(branch, None)
709 self.changed = True
710 # no need to recurse further
711 return False
712 return True
 713 
 714 class SpellDelZeroScale(pyffi.spells.nif.NifSpell):
715 """Remove nodes with zero scale."""
716
717 SPELLNAME = "opt_delzeroscale"
718 READONLY = False
 719 
 720     def datainspect(self):
721 # only run the spell if there are scaled objects
722 return self.inspectblocktype(NifFormat.NiAVObject)
723
 727 
 728     def branchentry(self, branch):
729 if isinstance(branch, NifFormat.NiAVObject):
730 if branch.scale == 0:
731 self.toaster.msg("removing zero scaled branch")
732 self.data.replace_global_node(branch, None)
733 self.changed = True
734 # no need to recurse further
735 return False
736 return True
 737 
 738 class SpellReduceGeometry(SpellOptimizeGeometry):
739 """Reduce vertices of all geometries."""
740
741 SPELLNAME = "opt_reducegeometry"
742 READONLY = False
743
 744 @classmethod
 745     def toastentry(cls, toaster):
746 if not toaster.options["arg"]:
747 toaster.logger.warn(
748 "must specify degree of reduction as argument "
749 "(e.g. 2 to reduce a little, 1 to reduce more, "
750 "0 to reduce even more, -0.1 is usually the highest "
751 "level of optimization possible before significant "
752 " graphical oddities occur) to to apply spell")
753 return False
754 else:
755 precision = float(toaster.options["arg"])
756 cls.VERTEXPRECISION = precision
757 cls.NORMALPRECISION = max(precision, 0)
758 cls.UVPRECISION = max(precision, 0)
759 cls.VCOLPRECISION = max(precision, 0)
760 return True
 761 
 762 class SpellOptimizeCollisionBox(pyffi.spells.nif.NifSpell):
763 """Optimize collision geometries by converting shapes to primitive
764 boxes where appropriate.
765 """
766 SPELLNAME = "opt_collisionbox"
767 READONLY = False
768 VERTEXPRECISION = 3
 769 
 770     def __init__(self, *args, **kwargs):
771 pyffi.spells.nif.NifSpell.__init__(self, *args, **kwargs)
772 # list of all optimized geometries so far
773 # (to avoid optimizing the same geometry twice)
774 self.optimized = []
 775 
 776     def datainspect(self):
777 # only run the spell if there are collisions
778 return (
779 self.inspectblocktype(NifFormat.bhkPackedNiTriStripsShape)
780 or self.inspectblocktype(NifFormat.bhkNiTriStripsShape))
 781 
 782     def branchinspect(self, branch):
783 # only inspect the collision branches
784 return isinstance(branch, (NifFormat.NiAVObject,
785 NifFormat.bhkCollisionObject,
786 NifFormat.bhkRigidBody,
787 NifFormat.bhkMoppBvTreeShape))
 788 
 789     def get_box_shape(self, shape):
790 """Check if the given shape is has a box shape. If so, return an
791 equivalent (bhkConvexTransformShape +) bhkBoxShape.
792
793 Shape should be a bhkPackedNiTriStripsShape or a bhkNiTriStripsShape.
794 """
795 PRECISION = 100
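    # PRECISION sets how closely the rescaled vertices must match the corners
    # of a perfect box before the shape is treated as one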
796
797 # get vertices, triangles, and material
798 if isinstance(shape, NifFormat.bhkPackedNiTriStripsShape):
799 # multimaterial? cannot use a box
800 if len(shape.get_sub_shapes()) != 1:
801 return None
802 vertices = shape.data.vertices
803 triangles = [
804 (hk_triangle.triangle.v_1,
805 hk_triangle.triangle.v_2,
806 hk_triangle.triangle.v_3)
807 for hk_triangle in shape.data.triangles]
808 material = shape.get_sub_shapes()[0].material
809 factor = 1.0
810 elif isinstance(shape, NifFormat.bhkNiTriStripsShape):
811 if shape.num_strips_data != 1:
812 return None
813 vertices = shape.strips_data[0].vertices
814 triangles = shape.strips_data[0].get_triangles()
815 material = shape.material
816 factor = 7.0
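    # bhkNiTriStripsShape vertices are stored in NIF units, while bhkBoxShape
    # dimensions are set in havok units; the factor of 7 (presumably the
    # standard havok scale) converts between the two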
817 # check triangles
818 if len(triangles) != 12:
819 return None
820 # sorted vertices of a unit box
821 unit_box = [(0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1),
822 (1, 0, 0), (1, 0, 1), (1, 1, 0), (1, 1, 1)]
823 # sort vertices and rescale them to fit in the unit box
824 verts = sorted(list(vert.as_tuple() for vert in vertices))
825 min_ = [min(vert[i] for vert in verts) for i in range(3)]
826 size = [max(vert[i] for vert in verts) - min_[i] for i in range(3)]
827 if any((s < 1e-10) for s in size):
828 # one of the dimensions is zero, so not a box
829 return None
830 scaled_verts = sorted(
831 set(tuple(int(0.5 + PRECISION * ((vert[i] - min_[i]) / size[i]))
832 for i in range(3))
833 for vert in verts))
834 # if our vertices are a box, then the scaled_verts should coincide with
835 # unit_box
836 if len(scaled_verts) != 8:
837 # cannot be a box
838 return None
839 non_boxiness = sum(sum(abs(PRECISION * vert[i] - othervert[i])
840 for i in range(3))
841 for vert, othervert in zip(unit_box, scaled_verts))
842 if non_boxiness > 0:
843 # not really a box, so return nothing
844 return None
845 # it is a box! replace by a bhkBoxShape
846 boxshape = NifFormat.bhkBoxShape()
847 boxshape.dimensions.x = size[0] / (2 * factor)
848 boxshape.dimensions.y = size[1] / (2 * factor)
849 boxshape.dimensions.z = size[2] / (2 * factor)
850 boxshape.minimum_size = min(size) / factor
851 try:
852 boxshape.material = material
853 except ValueError:
854 # material has a bad value, this sometimes happens
855 pass
856 boxshape.radius = 0.1
857 boxshape.unknown_8_bytes[0] = 0x6b
858 boxshape.unknown_8_bytes[1] = 0xee
859 boxshape.unknown_8_bytes[2] = 0x43
860 boxshape.unknown_8_bytes[3] = 0x40
861 boxshape.unknown_8_bytes[4] = 0x3a
862 boxshape.unknown_8_bytes[5] = 0xef
863 boxshape.unknown_8_bytes[6] = 0x8e
864 boxshape.unknown_8_bytes[7] = 0x3e
865 # check translation
866 mid = [min_[i] + 0.5 * size[i] for i in range(3)]
867 if sum(abs(mid[i]) for i in range(3)) < 1e-6:
868 # no transform needed
869 return boxshape
870 else:
871 # create transform block
872 tfshape = NifFormat.bhkConvexTransformShape()
873 tfshape.shape = boxshape
874 tfshape.material = boxshape.material
875 tfshape.transform.m_14 = mid[0] / factor
876 tfshape.transform.m_24 = mid[1] / factor
877 tfshape.transform.m_34 = mid[2] / factor
878 return tfshape
 879 
 880     def branchentry(self, branch):
881 """Optimize a vertex based collision block:
882 - remove duplicate vertices
 883 - rebuild triangle indices and welding info
884 - update MOPP data if applicable.
885 """
886 if branch in self.optimized:
887 # already optimized
888 return False
889
890 if (isinstance(branch, NifFormat.bhkMoppBvTreeShape)
891 and isinstance(branch.shape, NifFormat.bhkPackedNiTriStripsShape)
892 and isinstance(branch.shape.data,
893 NifFormat.hkPackedNiTriStripsData)):
894 # packed collision with mopp
895 box_shape = self.get_box_shape(branch.shape)
896 if box_shape:
897 # it is a box, replace bhkMoppBvTreeShape
898 self.data.replace_global_node(branch, box_shape)
899 self.toaster.msg(_("optimized box collision"))
900 self.changed = True
901 self.optimized.append(branch)
902 return False # don't recurse farther
903 elif (isinstance(branch, NifFormat.bhkRigidBody)
904 and isinstance(branch.shape, NifFormat.bhkNiTriStripsShape)):
905 # unpacked collision
906 box_shape = self.get_box_shape(branch.shape)
907 if box_shape:
908 # it is a box, replace bhkNiTriStripsShape
909 self.data.replace_global_node(branch.shape, box_shape)
910 self.toaster.msg(_("optimized box collision"))
911 self.changed = True
912 self.optimized.append(branch)
913 # don't recurse further
914 return False
915 elif (isinstance(branch, NifFormat.bhkRigidBody)
916 and isinstance(branch.shape,
917 NifFormat.bhkPackedNiTriStripsShape)):
918 # packed collision without mopp
919 box_shape = self.get_box_shape(branch.shape)
920 if box_shape:
921 # it's a box, replace bhkPackedNiTriStripsShape
922 self.data.replace_global_node(branch.shape, box_shape)
923 self.toaster.msg(_("optimized box collision"))
924 self.changed = True
925 self.optimized.append(branch)
926 return False
927 #keep recursing
928 return True
 929 
 930 class SpellOptimizeCollisionGeometry(pyffi.spells.nif.NifSpell):
931 """Optimize collision geometries by removing duplicate vertices."""
932
933 SPELLNAME = "opt_collisiongeometry"
934 READONLY = False
935 VERTEXPRECISION = 3
 936 
 937     def __init__(self, *args, **kwargs):
938 pyffi.spells.nif.NifSpell.__init__(self, *args, **kwargs)
939 # list of all optimized geometries so far
940 # (to avoid optimizing the same geometry twice)
941 self.optimized = []
 942 
 943     def datainspect(self):
944 # only run the spell if there are collisions
945 return (
946 self.inspectblocktype(NifFormat.bhkPackedNiTriStripsShape)
947 or self.inspectblocktype(NifFormat.bhkNiTriStripsShape))
 948 
 949     def branchinspect(self, branch):
950 # only inspect the collision branches
951 return isinstance(branch, (NifFormat.NiAVObject,
952 NifFormat.bhkCollisionObject,
953 NifFormat.bhkRigidBody,
954 NifFormat.bhkMoppBvTreeShape))
 955 
 956     def optimize_mopp(self, mopp):
957 """Optimize a bhkMoppBvTreeShape."""
958 shape = mopp.shape
959 data = shape.data
960
961 # removing duplicate vertices
962 self.toaster.msg(_("removing duplicate vertices"))
963 # make a joint map for all subshapes
964 # while doing this, also update subshape vertex count
965 full_v_map = []
966 full_v_map_inverse = []
967 subshape_counts = []
968 for subshape_index in range(len(shape.get_sub_shapes())):
969 self.toaster.msg(_("(processing subshape %i)")
970 % subshape_index)
971 v_map, v_map_inverse = unique_map(
972 shape.get_vertex_hash_generator(
973 vertexprecision=self.VERTEXPRECISION,
974 subshape_index=subshape_index))
975 self.toaster.msg(
976 _("(num vertices in collision shape was %i and is now %i)")
977 % (len(v_map), len(v_map_inverse)))
978 # update subshape vertex count
979 subshape_counts.append(len(v_map_inverse))
980 # update full maps
981 num_vertices = len(full_v_map_inverse)
982 old_num_vertices = len(full_v_map)
983 full_v_map += [num_vertices + i
984 for i in v_map]
985 full_v_map_inverse += [old_num_vertices + old_i
986 for old_i in v_map_inverse]
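    # each subshape's map is appended with an offset so full_v_map and
    # full_v_map_inverse index into the joint vertex array of all subshapes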
987 # copy old data
988 oldverts = [[v.x, v.y, v.z] for v in data.vertices]
989 # set new subshape counts
990 for subshape_index, subshape_count in enumerate(subshape_counts):
991 if shape.sub_shapes:
992 # oblivion subshapes
993 shape.sub_shapes[subshape_index].num_vertices = subshape_count
994 if shape.data.sub_shapes:
995 # fallout 3 subshapes
996 shape.data.sub_shapes[subshape_index].num_vertices = subshape_count
997 # set new data
998 data.num_vertices = len(full_v_map_inverse)
999 data.vertices.update_size()
1000 for old_i, v in izip(full_v_map_inverse, data.vertices):
1001 v.x = oldverts[old_i][0]
1002 v.y = oldverts[old_i][1]
1003 v.z = oldverts[old_i][2]
1004 del oldverts
1005 # update vertex indices in triangles
1006 for tri in data.triangles:
1007 tri.triangle.v_1 = full_v_map[tri.triangle.v_1]
1008 tri.triangle.v_2 = full_v_map[tri.triangle.v_2]
1009 tri.triangle.v_3 = full_v_map[tri.triangle.v_3]
1010 # at the moment recreating the mopp will destroy multi material mopps
1011 # (this is a bug in the mopper, not sure what it is)
1012 # so for now, we keep the mopp intact
1013 # and since the mopp code references the triangle indices
1014 # we must also keep the triangles intact
1015 if len(shape.get_sub_shapes()) != 1:
1016 return
1017
1018 # remove duplicate triangles
1019 self.toaster.msg(_("removing duplicate triangles"))
1020 t_map, t_map_inverse = unique_map(shape.get_triangle_hash_generator())
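    # t_map maps old triangle indices to new ones, t_map_inverse maps back;
    # entries may be None for triangles that have no counterpart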
1021 new_numtriangles = len(t_map_inverse)
1022 self.toaster.msg(_("(num triangles in collision shape was %i and is now %i)")
1023 % (len(t_map), new_numtriangles))
1024 # copy old data
1025 oldtris = [[tri.triangle.v_1, tri.triangle.v_2, tri.triangle.v_3,
1026 tri.normal.x, tri.normal.y, tri.normal.z]
1027 for tri in data.triangles]
1028 # set new data
1029 data.num_triangles = new_numtriangles
1030 data.triangles.update_size()
1031 for old_i, tri in izip(t_map_inverse, data.triangles):
1032 if old_i is None:
1033 continue
1034 tri.triangle.v_1 = oldtris[old_i][0]
1035 tri.triangle.v_2 = oldtris[old_i][1]
1036 tri.triangle.v_3 = oldtris[old_i][2]
1037 tri.normal.x = oldtris[old_i][3]
1038 tri.normal.y = oldtris[old_i][4]
1039 tri.normal.z = oldtris[old_i][5]
1040 # note: welding updated later when calling the mopper
1041 del oldtris
1042 # update mopp data and welding info
1043 mopp.update_mopp_welding()
 1044 
 1045     def branchentry(self, branch):
1046 """Optimize a vertex based collision block:
1047 - remove duplicate vertices
1048 - rebuild triangle indices and welding info
1049 - update mopp data if applicable.
1050
1051 (note: latter two are skipped at the moment due to
1052 multimaterial bug in mopper)
1053 """
1054 if branch in self.optimized:
1055 # already optimized
1056 return False
1057
1058 if (isinstance(branch, NifFormat.bhkMoppBvTreeShape)
1059 and isinstance(branch.shape, NifFormat.bhkPackedNiTriStripsShape)
1060 and isinstance(branch.shape.data,
1061 NifFormat.hkPackedNiTriStripsData)):
1062 # packed collision with mopp
1063 self.toaster.msg(_("optimizing mopp"))
1064 self.optimize_mopp(branch)
1065 if branch.shape.data.num_vertices < 3:
1066 self.toaster.msg(_("less than 3 vertices: removing branch"))
1067 self.data.replace_global_node(branch, None)
1068 self.changed = True
1069 return False
1070 self.optimized.append(branch)
1071 self.changed = True
1072 return False
1073 elif (isinstance(branch, NifFormat.bhkRigidBody)
1074 and isinstance(branch.shape, NifFormat.bhkNiTriStripsShape)):
1075 if branch.layer == NifFormat.OblivionLayer.OL_CLUTTER:
1076 # packed collisions do not work for clutter
1077 # so skip it
1078 # see issue #3194017 reported by Gratis_monsta
1079 return False
1080 # unpacked collision: convert to packed
1081 self.toaster.msg(_("packing collision"))
1082 new_shape = branch.shape.get_interchangeable_packed_shape()
1083 self.data.replace_global_node(branch.shape, new_shape)
1084 # call branchentry again in order to create a mopp for it
1085 # so we don't append it to self.optimized yet!!
1086 self.branchentry(branch)
1087 self.changed = True
1088 # don't recurse further
1089 return False
1090 elif (isinstance(branch, NifFormat.bhkRigidBody)
1091 and isinstance(branch.shape,
1092 NifFormat.bhkPackedNiTriStripsShape)):
1093 # packed collision without mopp
1094 # add a mopp to it if it is static
1095 if any(sub_shape.layer != 1
1096 for sub_shape in branch.shape.get_sub_shapes()):
1097 # no mopps for non-static objects
1098 return False
1099 self.toaster.msg(_("adding mopp"))
1100 mopp = NifFormat.bhkMoppBvTreeShape()
1101 shape = branch.shape # store reference before replacing
1102 self.data.replace_global_node(branch.shape, mopp)
1103 mopp.shape = shape
1104 mopp.material = shape.get_sub_shapes()[0].material
1105 mopp.unknown_8_bytes[0] = 160
1106 mopp.unknown_8_bytes[1] = 13
1107 mopp.unknown_8_bytes[2] = 75
1108 mopp.unknown_8_bytes[3] = 1
1109 mopp.unknown_8_bytes[4] = 192
1110 mopp.unknown_8_bytes[5] = 207
1111 mopp.unknown_8_bytes[6] = 144
1112 mopp.unknown_8_bytes[7] = 11
1113 mopp.unknown_float = 1.0
1114 mopp.update_mopp_welding()
1115 # call branchentry again in order to optimize the mopp
1116 # so we don't append it to self.optimized yet!!
1117 self.branchentry(mopp)
1118 self.changed = True
1119 return False
1120 # keep recursing
1121 return True
 1122 
 1123 class SpellOptimizeAnimation(pyffi.spells.nif.NifSpell):
1124 """Optimizes animations by removing duplicate keys"""
1125
1126 SPELLNAME = "opt_optimizeanimation"
1127 READONLY = False
1128
 1129 @classmethod
 1130     def toastentry(cls, toaster):
1131 if not toaster.options["arg"]:
1132 cls.significance_check = 4
1133 else:
1134 cls.significance_check = float(toaster.options["arg"])
1135 return True
1136
 1137 
 1138     def datainspect(self):
1139 # returns more than needed but easiest way to ensure it catches all
1140 # types of animations
1141 return True
 1142 
 1143     def branchinspect(self, branch):
1144 # inspect the NiAVObject branch, and NiControllerSequence
1145 # branch (for kf files)
1146 return isinstance(branch, (NifFormat.NiAVObject,
1147 NifFormat.NiTimeController,
1148 NifFormat.NiInterpolator,
1149 NifFormat.NiControllerManager,
1150 NifFormat.NiControllerSequence,
1151 NifFormat.NiKeyframeData,
1152 NifFormat.NiTextKeyExtraData,
1153 NifFormat.NiFloatData))
 1154 
 1155     def optimize_keys(self, keys):
1156 """Helper function to optimize the keys."""
1157 new_keys = []
1158 #compare keys
1159 ## types: 0 = float/int values
1160 ## 1 = Vector4, Quaternions, QuaternionsWXYZ
1161 ## 2 = word values (ie NiTextKeyExtraData)
1162 ## 3 = Vector3 values (ie translations)
1163 if len(keys) < 3: return keys # no optimization possible?
1164 precision = 10**self.significance_check
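    # keys are compared after rounding to significance_check decimal places;
    # a key is dropped only if both its previous and next key round to the
    # same value, so the first and last keys and isolated changes survive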
1165 if isinstance(keys[0].value,(float,int)):
1166 for i, key in enumerate(keys):
1167 if i == 0: # since we don't want to delete the first key even if it is the same as the last key.
1168 new_keys.append(key)
1169 continue
1170 try:
1171 if int(precision*keys[i-1].value) != int(precision*key.value):
1172 new_keys.append(key)
1173 continue
1174 if int(precision*keys[i+1].value) != int(precision*key.value):
1175 new_keys.append(key)
1176 except IndexError:
1177 new_keys.append(key)
1178 return new_keys
1179 elif isinstance(keys[0].value,(str)):
1180 for i, key in enumerate(keys):
1181 if i == 0: # since we don't want to delete the first key even if it is the same as the last key.
1182 new_keys.append(key)
1183 continue
1184 try:
1185 if keys[i-1].value != key.value:
1186 new_keys.append(key)
1187 continue
1188 if keys[i+1].value != key.value:
1189 new_keys.append(key)
1190 except IndexError:
1191 new_keys.append(key)
1192 return new_keys
1193 elif isinstance(keys[0].value,(NifFormat.Vector4,NifFormat.Quaternion,NifFormat.QuaternionXYZW)):
1194 tempkey = [[int(keys[0].value.w*precision),int(keys[0].value.x*precision),int(keys[0].value.y*precision),int(keys[0].value.z*precision)],[int(keys[1].value.w*precision),int(keys[1].value.x*precision),int(keys[1].value.y*precision),int(keys[1].value.z*precision)],[int(keys[2].value.w*precision),int(keys[2].value.x*precision),int(keys[2].value.y*precision),int(keys[2].value.z*precision)]]
1195 for i, key in enumerate(keys):
1196 if i == 0:
1197 new_keys.append(key)
1198 continue
1199 tempkey[0] = tempkey[1]
1200 tempkey[1] = tempkey[2]
1201 tempkey[2] = []
1202 try:
1203 tempkey[2].append(int(keys[i+1].value.w*precision))
1204 tempkey[2].append(int(keys[i+1].value.x*precision))
1205 tempkey[2].append(int(keys[i+1].value.y*precision))
1206 tempkey[2].append(int(keys[i+1].value.z*precision))
1207 except IndexError:
1208 new_keys.append(key)
1209 continue
1210 if tempkey[1] != tempkey[0]:
1211 new_keys.append(key)
1212 continue
1213 if tempkey[1] != tempkey[2]:
1214 new_keys.append(key)
1215 return new_keys
1216 elif isinstance(keys[0].value,(NifFormat.Vector3)):
1217 tempkey = [[int(keys[0].value.x*precision),int(keys[0].value.y*precision),int(keys[0].value.z*precision)],[int(keys[1].value.x*precision),int(keys[1].value.y*precision),int(keys[1].value.z*precision)],[int(keys[2].value.x*precision),int(keys[2].value.y*precision),int(keys[2].value.z*precision)]]
1218 for i, key in enumerate(keys):
1219 if i == 0:
1220 new_keys.append(key)
1221 continue
1222 tempkey[0] = tempkey[1]
1223 tempkey[1] = tempkey[2]
1224 tempkey[2] = []
1225 try:
1226 tempkey[2].append(int(keys[i+1].value.x*precision))
1227 tempkey[2].append(int(keys[i+1].value.y*precision))
1228 tempkey[2].append(int(keys[i+1].value.z*precision))
1229 except IndexError:
1230 new_keys.append(key)
1231 continue
1232 if tempkey[1] != tempkey[0]:
1233 new_keys.append(key)
1234 continue
1235 if tempkey[1] != tempkey[2]:
1236 new_keys.append(key)
1237 return new_keys
1238 else: #something unhandled -- but what?
1239
1240 return keys
 1241 
 1242     def update_animation(self, old_keygroup, new_keys):
1243 self.toaster.msg(_("Num keys was %i and is now %i") % (len(old_keygroup.keys),len(new_keys)))
1244 old_keygroup.num_keys = len(new_keys)
1245 old_keygroup.keys.update_size()
1246 for old_key, new_key in izip(old_keygroup.keys,new_keys):
1247 old_key.time = new_key.time
1248 old_key.value = new_key.value
1249 self.changed = True
 1250 
 1251     def update_animation_quaternion(self, old_keygroup, new_keys):
1252 self.toaster.msg(_("Num keys was %i and is now %i") % (len(old_keygroup),len(new_keys)))
1253 old_keygroup.update_size()
1254 for old_key, new_key in izip(old_keygroup,new_keys):
1255 old_key.time = new_key.time
1256 old_key.value = new_key.value
1257 self.changed = True
 1258 
 1259     def branchentry(self, branch):
1260
1261 if isinstance(branch, NifFormat.NiKeyframeData):
1262 # (this also covers NiTransformData)
1263 if branch.num_rotation_keys != 0:
1264 if branch.rotation_type == 4:
1265 for rotation in branch.xyz_rotations:
1266 new_keys = self.optimize_keys(rotation.keys)
1267 if len(new_keys) != rotation.num_keys:
1268 self.update_animation(rotation,new_keys)
1269 else:
1270 new_keys = self.optimize_keys(branch.quaternion_keys)
1271 if len(new_keys) != branch.num_rotation_keys:
1272 branch.num_rotation_keys = len(new_keys)
1273 self.update_animation_quaternion(branch.quaternion_keys,new_keys)
1274 if branch.translations.num_keys != 0:
1275 new_keys = self.optimize_keys(branch.translations.keys)
1276 if len(new_keys) != branch.translations.num_keys:
1277 self.update_animation(branch.translations,new_keys)
1278 if branch.scales.num_keys != 0:
1279 new_keys = self.optimize_keys(branch.scales.keys)
1280 if len(new_keys) != branch.scales.num_keys:
1281 self.update_animation(branch.scales,new_keys)
1282 # no children of NiKeyframeData so no need to recurse further
1283 return False
1284 elif isinstance(branch, NifFormat.NiTextKeyExtraData):
1285 self.optimize_keys(branch.text_keys)
1286 # no children of NiTextKeyExtraData so no need to recurse further
1287 return False
1288 elif isinstance(branch, NifFormat.NiFloatData):
1289 #self.optimize_keys(branch.data.keys)
1290 # no children of NiFloatData so no need to recurse further
1291 return False
1292 else:
1293 # recurse further
1294 return True
1295
 1296 class SpellOptimize(
1297 pyffi.spells.SpellGroupSeries(
1298 pyffi.spells.nif.modify.SpellCleanFarNif,
1299 pyffi.spells.SpellGroupParallel(
1300 pyffi.spells.nif.fix.SpellDelUnusedRoots,
1301 SpellCleanRefLists,
1302 pyffi.spells.nif.fix.SpellDetachHavokTriStripsData,
1303 pyffi.spells.nif.fix.SpellFixTexturePath,
1304 pyffi.spells.nif.fix.SpellClampMaterialAlpha,
1305 pyffi.spells.nif.fix.SpellFixBhkSubShapes,
1306 pyffi.spells.nif.fix.SpellFixEmptySkeletonRoots),
1307 SpellMergeDuplicates,
1308 SpellOptimizeGeometry,
1309 SpellOptimizeCollisionBox,
1310 SpellOptimizeCollisionGeometry,
1311 )):
 1313     SPELLNAME = "optimize"
 1314 