Package pyffi :: Package formats :: Package nif
[hide private]
[frames] | [no frames]

Source Code for Package pyffi.formats.nif

   1  """ 
   2  :mod:`pyffi.formats.nif` --- NetImmerse/Gamebryo (.nif and .kf) 
   3  =============================================================== 
   4   
   5  Implementation 
   6  -------------- 
   7   
   8  .. autoclass:: NifFormat 
   9     :show-inheritance: 
  10     :members: 
  11   
  12  Regression tests 
  13  ---------------- 
  14   
  15  These tests are used to check for functionality and bugs in the library. 
  16  They also provide code examples which you may find useful. 
  17   
  18  Read a NIF file 
  19  ^^^^^^^^^^^^^^^ 
  20   
  21  >>> stream = open('tests/nif/test.nif', 'rb') 
  22  >>> data = NifFormat.Data() 
  23  >>> # inspect is optional; it will not read the actual blocks 
  24  >>> data.inspect(stream) 
  25  >>> hex(data.version) 
  26  '0x14010003' 
  27  >>> data.user_version 
  28  0 
  29  >>> for blocktype in data.header.block_types: 
  30  ...     print(blocktype.decode("ascii")) 
  31  NiNode 
  32  NiTriShape 
  33  NiTriShapeData 
  34  >>> data.roots # blocks have not been read yet, so this is an empty list 
  35  [] 
  36  >>> data.read(stream) 
  37  >>> for root in data.roots: 
  38  ...     for block in root.tree(): 
  39  ...         if isinstance(block, NifFormat.NiNode): 
  40  ...             print(block.name.decode("ascii")) 
  41  test 
  42  >>> stream.close() 
  43   
  44  Parse all NIF files in a directory tree 
  45  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
  46   
  47  >>> for stream, data in NifFormat.walkData('tests/nif'): 
  48  ...     try: 
  49  ...         # the replace call makes the doctest also pass on windows 
  50  ...         print("reading %s" % stream.name.replace("\\\\", "/")) 
  51  ...         data.read(stream) 
  52  ...     except Exception: 
  53  ...         print( 
  54  ...             "Warning: read failed due corrupt file," 
  55  ...             " corrupt format description, or bug.") # doctest: +REPORT_NDIFF 
  56  reading tests/nif/invalid.nif 
  57  Warning: read failed due corrupt file, corrupt format description, or bug. 
  58  reading tests/nif/nds.nif 
  59  reading tests/nif/neosteam.nif 
  60  reading tests/nif/test.nif 
  61  reading tests/nif/test_centerradius.nif 
  62  reading tests/nif/test_check_tangentspace1.nif 
  63  reading tests/nif/test_check_tangentspace2.nif 
  64  reading tests/nif/test_check_tangentspace3.nif 
  65  reading tests/nif/test_check_tangentspace4.nif 
  66  reading tests/nif/test_convexverticesshape.nif 
  67  reading tests/nif/test_dump_tex.nif 
  68  reading tests/nif/test_fix_clampmaterialalpha.nif 
  69  reading tests/nif/test_fix_cleanstringpalette.nif 
  70  reading tests/nif/test_fix_detachhavoktristripsdata.nif 
  71  reading tests/nif/test_fix_disableparallax.nif 
  72  reading tests/nif/test_fix_ffvt3rskinpartition.nif 
  73  reading tests/nif/test_fix_mergeskeletonroots.nif 
  74  reading tests/nif/test_fix_tangentspace.nif 
  75  reading tests/nif/test_fix_texturepath.nif 
  76  reading tests/nif/test_grid_128x128.nif 
  77  reading tests/nif/test_grid_64x64.nif 
  78  reading tests/nif/test_mopp.nif 
  79  reading tests/nif/test_opt_collision_complex_mopp.nif 
  80  reading tests/nif/test_opt_collision_mopp.nif 
  81  reading tests/nif/test_opt_collision_packed.nif 
  82  reading tests/nif/test_opt_collision_to_boxshape.nif 
  83  reading tests/nif/test_opt_collision_to_boxshape_notabox.nif 
  84  reading tests/nif/test_opt_collision_unpacked.nif 
  85  reading tests/nif/test_opt_delunusedbones.nif 
  86  reading tests/nif/test_opt_dupgeomdata.nif 
  87  reading tests/nif/test_opt_dupverts.nif 
  88  reading tests/nif/test_opt_emptyproperties.nif 
  89  reading tests/nif/test_opt_grid_layout.nif 
  90  reading tests/nif/test_opt_mergeduplicates.nif 
  91  reading tests/nif/test_opt_vertex_cache.nif 
  92  reading tests/nif/test_opt_zeroscale.nif 
  93  reading tests/nif/test_skincenterradius.nif 
  94  reading tests/nif/test_vertexcolor.nif 
  95   
  96  Create a NIF model from scratch and write to file 
  97  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
  98   
  99  >>> root = NifFormat.NiNode() 
 100  >>> root.name = 'Scene Root' 
 101  >>> blk = NifFormat.NiNode() 
 102  >>> root.add_child(blk) 
 103  >>> blk.name = 'new block' 
 104  >>> blk.scale = 2.4 
 105  >>> blk.translation.x = 3.9 
 106  >>> blk.rotation.m_11 = 1.0 
 107  >>> blk.rotation.m_22 = 1.0 
 108  >>> blk.rotation.m_33 = 1.0 
 109  >>> ctrl = NifFormat.NiVisController() 
 110  >>> ctrl.flags = 0x000c 
 111  >>> ctrl.target = blk 
 112  >>> blk.add_controller(ctrl) 
 113  >>> blk.add_controller(NifFormat.NiAlphaController()) 
 114  >>> strips = NifFormat.NiTriStrips() 
 115  >>> root.add_child(strips, front = True) 
 116  >>> strips.name = "hello world" 
 117  >>> strips.rotation.m_11 = 1.0 
 118  >>> strips.rotation.m_22 = 1.0 
 119  >>> strips.rotation.m_33 = 1.0 
 120  >>> data = NifFormat.NiTriStripsData() 
 121  >>> strips.data = data 
 122  >>> data.num_vertices = 5 
 123  >>> data.has_vertices = True 
 124  >>> data.vertices.update_size() 
 125  >>> for i, v in enumerate(data.vertices): 
 126  ...     v.x = 1.0+i/10.0 
 127  ...     v.y = 0.2+1.0/(i+1) 
 128  ...     v.z = 0.03 
 129  >>> data.update_center_radius() 
 130  >>> data.num_strips = 2 
 131  >>> data.strip_lengths.update_size() 
 132  >>> data.strip_lengths[0] = 3 
 133  >>> data.strip_lengths[1] = 4 
 134  >>> data.has_points = True 
 135  >>> data.points.update_size() 
 136  >>> data.points[0][0] = 0 
 137  >>> data.points[0][1] = 1 
 138  >>> data.points[0][2] = 2 
 139  >>> data.points[1][0] = 1 
 140  >>> data.points[1][1] = 2 
 141  >>> data.points[1][2] = 3 
 142  >>> data.points[1][3] = 4 
 143  >>> data.num_uv_sets = 1 
 144  >>> data.has_uv = True 
 145  >>> data.uv_sets.update_size() 
 146  >>> for i, v in enumerate(data.uv_sets[0]): 
 147  ...     v.u = 1.0-i/10.0 
 148  ...     v.v = 1.0/(i+1) 
 149  >>> data.has_normals = True 
 150  >>> data.normals.update_size() 
 151  >>> for i, v in enumerate(data.normals): 
 152  ...     v.x = 0.0 
 153  ...     v.y = 0.0 
 154  ...     v.z = 1.0 
 155  >>> strips.update_tangent_space() 
 156  >>> from tempfile import TemporaryFile 
 157  >>> stream = TemporaryFile() 
 158  >>> nifdata = NifFormat.Data(version=0x14010003, user_version=10) 
 159  >>> nifdata.roots = [root] 
 160  >>> nifdata.write(stream) 
 161  >>> stream.close() 
 162   
 163  Get list of versions and games 
 164  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
 165   
 166  >>> for vnum in sorted(NifFormat.versions.values()): 
 167  ...     print('0x%08X' % vnum) # doctest: +REPORT_UDIFF 
 168  0x02030000 
 169  0x03000000 
 170  0x03000300 
 171  0x03010000 
 172  0x0303000D 
 173  0x04000000 
 174  0x04000002 
 175  0x0401000C 
 176  0x04020002 
 177  0x04020100 
 178  0x04020200 
 179  0x0A000100 
 180  0x0A000102 
 181  0x0A000103 
 182  0x0A010000 
 183  0x0A010065 
 184  0x0A01006A 
 185  0x0A020000 
 186  0x0A020001 
 187  0x0A040001 
 188  0x14000004 
 189  0x14000005 
 190  0x14010003 
 191  0x14020007 
 192  0x14020008 
 193  0x14030001 
 194  0x14030002 
 195  0x14030003 
 196  0x14030006 
 197  0x14030009 
 198  0x14050000 
 199  0x14060000 
 200  0x14060500 
 201  0x1E000002 
 202  >>> for game, versions in sorted(NifFormat.games.items(), key=lambda x: x[0]): 
 203  ...     print("%s " % game + " ".join('0x%08X' % vnum for vnum in versions)) # doctest: +REPORT_UDIFF 
 204  ? 0x0A000103 
 205  Atlantica 0x14020008 
 206  Axis and Allies 0x0A010000 
 207  Bully SE 0x14030009 
 208  Civilization IV 0x04020002 0x04020100 0x04020200 0x0A000100 0x0A010000 \ 
 209  0x0A020000 0x14000004 
 210  Culpa Innata 0x04020200 
 211  Dark Age of Camelot 0x02030000 0x03000300 0x03010000 0x0401000C 0x04020100 \ 
 212  0x04020200 0x0A010000 
 213  Divinity 2 0x14030009 
 214  Emerge 0x14020007 0x14020008 0x14030001 0x14030002 0x14030003 0x14030006 \ 
 215  0x1E000002 
 216  Empire Earth II 0x04020200 0x0A010000 
 217  Empire Earth III 0x14020007 0x14020008 
 218  Entropia Universe 0x0A010000 
 219  Epic Mickey 0x14060500 
 220  Fallout 3 0x14020007 
 221  Freedom Force 0x04000000 0x04000002 
 222  Freedom Force vs. the 3rd Reich 0x0A010000 
 223  Howling Sword 0x14030009 
 224  Kohan 2 0x0A010000 
 225  KrazyRain 0x14050000 0x14060000 
 226  Lazeska 0x14030009 
 227  Loki 0x0A020000 
 228  Megami Tensei: Imagine 0x14010003 
 229  Morrowind 0x04000002 
 230  NeoSteam 0x0A010000 
 231  Oblivion 0x0303000D 0x0A000100 0x0A000102 0x0A010065 0x0A01006A 0x0A020000 0x14000004 \ 
 232  0x14000005 
 233  Prison Tycoon 0x0A020000 
 234  Pro Cycling Manager 0x0A020000 
 235  Red Ocean 0x0A020000 
 236  Sid Meier's Railroads 0x14000004 
 237  Star Trek: Bridge Commander 0x03000000 0x03010000 
 238  The Guild 2 0x0A010000 
 239  Warhammer 0x14030009 
 240  Wildlife Park 2 0x0A010000 0x0A020000 
 241  Worldshift 0x0A020001 0x0A040001 
 242  Zoo Tycoon 2 0x0A000100 
 243   
 244  Reading an unsupported nif file 
 245  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
 246   
 247  >>> stream = open('tests/nif/invalid.nif', 'rb') 
 248  >>> data = NifFormat.Data() 
 249  >>> data.inspect(stream) # the file seems ok on inspection 
 250  >>> data.read(stream) # doctest: +ELLIPSIS 
 251  Traceback (most recent call last): 
 252      ... 
 253  ValueError: ... 
 254  >>> stream.close() 
 255   
 256  Template types 
 257  ^^^^^^^^^^^^^^ 
 258   
 259  >>> block = NifFormat.NiTextKeyExtraData() 
 260  >>> block.num_text_keys = 1 
 261  >>> block.text_keys.update_size() 
 262  >>> block.text_keys[0].time = 1.0 
 263  >>> block.text_keys[0].value = 'hi' 
 264   
 265  Links 
 266  ^^^^^ 
 267   
 268  >>> NifFormat.NiNode._has_links 
 269  True 
 270  >>> NifFormat.NiBone._has_links 
 271  True 
 272  >>> skelroot = NifFormat.NiNode() 
 273  >>> geom = NifFormat.NiTriShape() 
 274  >>> geom.skin_instance = NifFormat.NiSkinInstance() 
 275  >>> geom.skin_instance.skeleton_root = skelroot 
 276  >>> [block.__class__.__name__ for block in geom.get_refs()] 
 277  ['NiSkinInstance'] 
 278  >>> [block.__class__.__name__ for block in geom.get_links()] 
 279  ['NiSkinInstance'] 
 280  >>> [block.__class__.__name__ for block in geom.skin_instance.get_refs()] 
 281  [] 
 282  >>> [block.__class__.__name__ for block in geom.skin_instance.get_links()] 
 283  ['NiNode'] 
 284   
 285  Strings 
 286  ^^^^^^^ 
 287   
 288  >>> extra = NifFormat.NiTextKeyExtraData() 
 289  >>> extra.num_text_keys = 2 
 290  >>> extra.text_keys.update_size() 
 291  >>> extra.text_keys[0].time = 0.0 
 292  >>> extra.text_keys[0].value = "start" 
 293  >>> extra.text_keys[1].time = 2.0 
 294  >>> extra.text_keys[1].value = "end" 
 295  >>> for extrastr in extra.get_strings(None): 
 296  ...     print(extrastr.decode("ascii")) 
 297  start 
 298  end 
 299  """ 
 300   
 301  # ***** BEGIN LICENSE BLOCK ***** 
 302  # 
 303  # Copyright (c) 2007-2011, NIF File Format Library and Tools. 
 304  # All rights reserved. 
 305  # 
 306  # Redistribution and use in source and binary forms, with or without 
 307  # modification, are permitted provided that the following conditions 
 308  # are met: 
 309  # 
 310  #    * Redistributions of source code must retain the above copyright 
 311  #      notice, this list of conditions and the following disclaimer. 
 312  # 
 313  #    * Redistributions in binary form must reproduce the above 
 314  #      copyright notice, this list of conditions and the following 
 315  #      disclaimer in the documentation and/or other materials provided 
 316  #      with the distribution. 
 317  # 
 318  #    * Neither the name of the NIF File Format Library and Tools 
 319  #      project nor the names of its contributors may be used to endorse 
 320  #      or promote products derived from this software without specific 
 321  #      prior written permission. 
 322  # 
 323  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
 324  # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
 325  # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
 326  # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
 327  # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
 328  # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
 329  # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
 330  # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
 331  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
 332  # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
 333  # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
 334  # POSSIBILITY OF SUCH DAMAGE. 
 335  # 
 336  # ***** END LICENSE BLOCK ***** 
 337   
 338  from itertools import izip, repeat, chain 
 339  import logging 
 340  import math # math.pi 
 341  import os 
 342  import re 
 343  import struct 
 344  import sys 
 345  import warnings 
 346  import weakref 
 347   
 348  import pyffi.formats.bsa 
 349  import pyffi.formats.dds 
 350  import pyffi.object_models.common 
 351  import pyffi.object_models 
 352  from pyffi.object_models.xml import FileFormat 
 353  import pyffi.utils.inertia 
 354  from pyffi.utils.mathutils import * # XXX todo get rid of from XXX import * 
 355  import pyffi.utils.mopp 
 356  import pyffi.utils.tristrip 
 357  import pyffi.utils.vertex_cache 
 358  import pyffi.utils.quickhull 
 359  # XXX convert the following to absolute imports 
 360  from pyffi.object_models.editable import EditableBoolComboBox 
 361  from pyffi.utils.graph import EdgeFilter 
 362  from pyffi.object_models.xml.basic import BasicBase 
 363  from pyffi.object_models.xml.struct_ import StructBase 
364 365 366 367 -class NifFormat(FileFormat):
368 """This class contains the generated classes from the xml.""" 369 xml_file_name = 'nif.xml' 370 # where to look for nif.xml and in what order: NIFXMLPATH env var, 371 # or NifFormat module directory 372 xml_file_path = [os.getenv('NIFXMLPATH'), 373 os.path.join(os.path.dirname(__file__), "nifxml")] 374 # filter for recognizing nif files by extension 375 # .kf are nif files containing keyframes 376 # .kfa are nif files containing keyframes in DAoC style 377 # .nifcache are Empire Earth II nif files 378 # .texcache are Empire Earth II/III packed texture nif files 379 # .pcpatch are Empire Earth II/III packed texture nif files 380 # .item are Divinity 2 nif files 381 # .nft are Bully SE nif files (containing textures) 382 # .nif_wii are Epic Mickey nif files 383 RE_FILENAME = re.compile(r'^.*\.(nif|kf|kfa|nifcache|jmi|texcache|pcpatch|nft|item|nif_wii)$', re.IGNORECASE) 384 # archives 385 ARCHIVE_CLASSES = [pyffi.formats.bsa.BsaFormat] 386 # used for comparing floats 387 EPSILON = 0.0001 388 389 # basic types 390 ulittle32 = pyffi.object_models.common.ULittle32 391 int = pyffi.object_models.common.Int 392 uint = pyffi.object_models.common.UInt 393 byte = pyffi.object_models.common.UByte # not a typo 394 char = pyffi.object_models.common.Char 395 short = pyffi.object_models.common.Short 396 ushort = pyffi.object_models.common.UShort 397 float = pyffi.object_models.common.Float 398 BlockTypeIndex = pyffi.object_models.common.UShort 399 StringIndex = pyffi.object_models.common.UInt 400 SizedString = pyffi.object_models.common.SizedString 401 402 # implementation of nif-specific basic types 403
class StringOffset(pyffi.object_models.common.Int):
    """An integer used as an offset into string data; defaults to -1
    (meaning: no string)."""

    def __init__(self, **kwargs):
        # initialize as a plain Int, then override the default with -1
        pyffi.object_models.common.Int.__init__(self, **kwargs)
        self.set_value(-1)
409
class bool(BasicBase, EditableBoolComboBox):
    """A NIF boolean: stored as 32 bits up to version 4.0.0.2 and as
    8 bits in later versions.

    >>> i = NifFormat.bool()
    >>> i.set_value('false')
    >>> i.get_value()
    False
    >>> i.set_value('true')
    >>> i.get_value()
    True
    """

    def __init__(self, **kwargs):
        BasicBase.__init__(self, **kwargs)
        self.set_value(False)

    def get_value(self):
        return self._value

    def set_value(self, value):
        # the strings 'false' (any case) and '0' count as False;
        # anything else falls back to ordinary truthiness
        if isinstance(value, basestring) and (
                value.lower() == 'false' or value == '0'):
            self._value = False
        else:
            # note: inside a method, 'bool' resolves to the builtin,
            # not this class (class scope is not visible here)
            self._value = True if value else False

    def get_size(self, data=None):
        ver = data.version if data else -1
        # one byte for new files, four bytes for old ones
        return 1 if ver > 0x04000002 else 4

    def get_hash(self, data=None):
        return self._value

    def read(self, stream, data):
        if data.version > 0x04000002:
            fmt, num_bytes = 'B', 1
        else:
            fmt, num_bytes = 'I', 4
        value, = struct.unpack(data._byte_order + fmt,
                               stream.read(num_bytes))
        self._value = (value != 0)

    def write(self, stream, data):
        fmt = 'B' if data.version > 0x04000002 else 'I'
        stream.write(struct.pack(data._byte_order + fmt,
                                 int(self._value)))
468
class Flags(pyffi.object_models.common.UShort):
    """Unsigned short whose string form shows the value in hexadecimal,
    so individual flag bits are readable."""

    def __str__(self):
        return hex(self.get_value())
472
473 - class Ref(BasicBase):
474 """Reference to another block.""" 475 _is_template = True 476 _has_links = True 477 _has_refs = True
478 - def __init__(self, **kwargs):
479 BasicBase.__init__(self, **kwargs) 480 self._template = kwargs.get("template") 481 self.set_value(None)
482
483 - def get_value(self):
484 return self._value
485
486 - def set_value(self, value):
487 if value is None: 488 self._value = None 489 else: 490 if not isinstance(value, self._template): 491 raise TypeError( 492 'expected an instance of %s but got instance of %s' 493 % (self._template, value.__class__)) 494 self._value = value
495
496 - def get_size(self, data=None):
497 return 4
498
499 - def get_hash(self, data=None):
500 if self.get_value(): 501 return self.get_value().get_hash(data) 502 else: 503 return None
504
505 - def read(self, stream, data):
506 self.set_value(None) # fix_links will set this field 507 block_index, = struct.unpack(data._byte_order + 'i', 508 stream.read(4)) 509 data._link_stack.append(block_index)
510
def write(self, stream, data):
    """Write the block reference as a signed 32-bit block index.

    An unlinked reference (or one whose target is missing from
    ``data._block_index_dct``) is written as -1 for version >= 3.3.0.13
    ("link by number") and as 0 for older versions ("link by pointer").

    :param stream: The file stream to write to.
    :param data: The data object carrying ``version``, ``_byte_order``
        and ``_block_index_dct``.
    """
    # -1: link by number, 0: link by pointer
    null_index = -1 if data.version >= 0x0303000D else 0
    if self.get_value() is None:
        block_index = null_index
    else:
        try:
            block_index = data._block_index_dct[self.get_value()]
        except KeyError:
            # bugfix: Logger.warn is a deprecated alias of warning()
            logging.getLogger("pyffi.nif.ref").warning(
                "%s block is missing from the nif tree:"
                " omitting reference"
                % self.get_value().__class__.__name__)
            block_index = null_index
    stream.write(struct.pack(
        data._byte_order + 'i', block_index))
528 550 557
558 - def get_refs(self, data=None):
559 val = self.get_value() 560 if val is not None: 561 return [val] 562 else: 563 return []
564
565 - def replace_global_node(self, oldbranch, newbranch, 566 edge_filter=EdgeFilter()):
567 """ 568 >>> from pyffi.formats.nif import NifFormat 569 >>> x = NifFormat.NiNode() 570 >>> y = NifFormat.NiNode() 571 >>> z = NifFormat.NiNode() 572 >>> x.add_child(y) 573 >>> x.children[0] is y 574 True 575 >>> x.children[0] is z 576 False 577 >>> x.replace_global_node(y, z) 578 >>> x.children[0] is y 579 False 580 >>> x.children[0] is z 581 True 582 >>> x.replace_global_node(z, None) 583 >>> x.children[0] is None 584 True 585 """ 586 if self.get_value() is oldbranch: 587 # set_value takes care of template type 588 self.set_value(newbranch) 589 #print("replacing", repr(oldbranch), "->", repr(newbranch)) 590 if self.get_value() is not None: 591 self.get_value().replace_global_node(oldbranch, newbranch)
592
593 - def get_detail_display(self):
594 # return the node itself, if it is not None 595 if self.get_value() is not None: 596 return self.get_value() 597 else: 598 return "None"
599
class Ptr(Ref):
    """A weak (upward) reference to another block, used to point back up
    the hierarchy tree. It is deliberately excluded from L{get_refs} so
    that reference traversal cannot recurse forever."""
    _is_template = True
    _has_links = True
    _has_refs = False

    # the target is held through a weakref to aid garbage collection

    def get_value(self):
        # dereference the weakref; None means no target
        if self._value is None:
            return None
        return self._value()

    def set_value(self, value):
        if value is None:
            self._value = None
            return
        if not isinstance(value, self._template):
            raise TypeError(
                'expected an instance of %s but got instance of %s'
                % (self._template, value.__class__))
        self._value = weakref.ref(value)

    def __str__(self):
        # do not print the target itself: that could recurse infinitely
        return '%s instance at 0x%08X'%(self._value.__class__, id(self._value))

    def get_refs(self, data=None):
        # pointers expose no refs (this is what breaks reference cycles)
        return []

    def get_hash(self, data=None):
        # pointers never contribute to the hash
        return None

    def replace_global_node(self, oldbranch, newbranch,
                            edge_filter=EdgeFilter()):
        # overridden: replace our target if it matches, but do not
        # descend into it (avoids infinite recursion)
        if self.get_value() is oldbranch:
            self.set_value(newbranch)
638 - class LineString(BasicBase):
639 """Basic type for strings ending in a newline character (0x0a). 640 641 >>> from tempfile import TemporaryFile 642 >>> f = TemporaryFile() 643 >>> l = NifFormat.LineString() 644 >>> f.write('abcdefg\\x0a'.encode()) 645 >>> f.seek(0) 646 >>> l.read(f) 647 >>> str(l) 648 'abcdefg' 649 >>> f.seek(0) 650 >>> l.set_value('Hi There') 651 >>> l.write(f) 652 >>> f.seek(0) 653 >>> m = NifFormat.LineString() 654 >>> m.read(f) 655 >>> str(m) 656 'Hi There' 657 """
658 - def __init__(self, **kwargs):
659 BasicBase.__init__(self, **kwargs) 660 self.set_value('')
661
662 - def get_value(self):
663 return self._value
664
665 - def set_value(self, value):
666 self._value = pyffi.object_models.common._as_bytes(value).rstrip('\x0a'.encode("ascii"))
667
668 - def __str__(self):
670
671 - def get_size(self, data=None):
672 return len(self._value) + 1 # +1 for trailing endline
673
674 - def get_hash(self, data=None):
675 return self.get_value()
676
677 - def read(self, stream, data=None):
678 self._value = stream.readline().rstrip('\x0a'.encode("ascii"))
679
680 - def write(self, stream, data=None):
681 stream.write(self._value) 682 stream.write("\x0a".encode("ascii"))
683
class HeaderString(BasicBase):
    """The version string at the very start of a nif file, e.g.
    'NetImmerse File Format, Version 4.0.0.2', terminated by a newline."""

    def __str__(self):
        return 'NetImmerse/Gamebryo File Format, Version x.x.x.x'

    def get_detail_display(self):
        return self.__str__()

    def get_hash(self, data=None):
        return None

    def read(self, stream, data):
        """Read and validate the header string against ``data.version``
        and ``data.modification``; raises ValueError on mismatch."""
        version_string = self.version_string(data.version, data.modification)
        s = stream.read(len(version_string))
        if s != version_string.encode("ascii"):
            raise ValueError(
                "invalid NIF header: expected '%s' but got '%s'"
                % (version_string, s))
        # for almost all nifs we have version_string + \x0a
        # but Bully SE has some nifs with version_string + \x0d\x0a
        # see for example World/BBonusB.nft
        eol = stream.read(1)
        if eol == '\x0d'.encode("ascii"):
            eol = stream.read(1)
        if eol != '\x0a'.encode("ascii"):
            raise ValueError(
                "invalid NIF header: bad version string eol")

    def write(self, stream, data):
        stream.write(self.version_string(data.version, data.modification).encode("ascii"))
        stream.write('\x0a'.encode("ascii"))

    def get_size(self, data=None):
        ver = data.version if data else -1
        # bugfix: pass the modification through so the computed size
        # matches what write() actually emits (e.g. NeoSteam nifs have
        # the short "NS" header, not the full version string)
        modification = data.modification if data else None
        return len(self.version_string(ver, modification).encode("ascii")) + 1

    @staticmethod
    def version_string(version, modification=None):
        """Transforms version number into a version string.

        >>> NifFormat.HeaderString.version_string(0x03000300)
        'NetImmerse File Format, Version 3.03'
        >>> NifFormat.HeaderString.version_string(0x03010000)
        'NetImmerse File Format, Version 3.1'
        >>> NifFormat.HeaderString.version_string(0x0A000100)
        'NetImmerse File Format, Version 10.0.1.0'
        >>> NifFormat.HeaderString.version_string(0x0A010000)
        'Gamebryo File Format, Version 10.1.0.0'
        >>> NifFormat.HeaderString.version_string(0x0A010000,
        ...     modification="neosteam")
        'NS'
        >>> NifFormat.HeaderString.version_string(0x14020008,
        ...     modification="ndoors")
        'NDSNIF....@....@...., Version 20.2.0.8'
        >>> NifFormat.HeaderString.version_string(0x14030009,
        ...     modification="jmihs1")
        'Joymaster HS1 Object Format - (JMI), Version 20.3.0.9'
        """
        if version == -1 or version is None:
            raise ValueError('No string for version %s.'%version)
        if modification == "neosteam":
            if version != 0x0A010000:
                raise ValueError("NeoSteam must have version 0x0A010000.")
            return "NS"
        elif version <= 0x0A000102:
            s = "NetImmerse"
        else:
            s = "Gamebryo"
        if version == 0x03000300:
            v = "3.03"
        elif version <= 0x03010000:
            v = "%i.%i"%((version >> 24) & 0xff, (version >> 16) & 0xff)
        else:
            v = "%i.%i.%i.%i"%((version >> 24) & 0xff, (version >> 16) & 0xff, (version >> 8) & 0xff, version & 0xff)
        if modification == "ndoors":
            return "NDSNIF....@....@...., Version %s" % v
        elif modification == "jmihs1":
            return "Joymaster HS1 Object Format - (JMI), Version %s" % v
        else:
            return "%s File Format, Version %s" % (s, v)
763
class FileVersion(BasicBase):
    """Basic type for the version integer in the nif header.

    Always stored in little endian, regardless of the file's byte
    order. Some game-specific variants ("modifications") store a fixed
    magic number here instead of the actual file version.
    """

    # (display name, magic) written in place of the version for known
    # modifications; plain nifs and "jmihs1" use the real data.version
    _magics = {
        "neosteam": ("NeoSteam ", 0x08F35232),
        "ndoors": ("Ndoors ", 0x73615F67),
        "laxelore": ("Laxe Lore ", 0x5A000004),
    }

    def get_value(self):
        raise NotImplementedError

    def set_value(self, value):
        raise NotImplementedError

    def __str__(self):
        return 'x.x.x.x'

    def get_size(self, data=None):
        return 4

    def get_hash(self, data=None):
        return None

    def read(self, stream, data):
        modification = data.modification
        ver, = struct.unpack('<I', stream.read(4))  # always little endian
        if (not modification) or modification == "jmihs1":
            name, expected = "", data.version
        else:
            try:
                name, expected = self._magics[modification]
            except KeyError:
                raise ValueError(
                    "unknown modification: '%s'" % modification)
        if ver != expected:
            raise ValueError(
                "Invalid %sversion number: "
                "expected 0x%08X but got 0x%08X."
                % (name, expected, ver))

    def write(self, stream, data):
        # always little endian
        modification = data.modification
        if (not modification) or modification == "jmihs1":
            magic = data.version
        else:
            try:
                magic = self._magics[modification][1]
            except KeyError:
                raise ValueError(
                    "unknown modification: '%s'" % modification)
        stream.write(struct.pack('<I', magic))

    def get_detail_display(self):
        return 'x.x.x.x'
829 - class ShortString(BasicBase):
830 """Another type for strings."""
831 - def __init__(self, **kwargs):
832 BasicBase.__init__(self, **kwargs) 833 self._value = ''.encode("ascii")
834
835 - def get_value(self):
836 return self._value
837
838 - def set_value(self, value):
839 val = pyffi.object_models.common._as_bytes(value) 840 if len(val) > 254: 841 raise ValueError('string too long') 842 self._value = val
843
844 - def __str__(self):
846
847 - def get_size(self, data=None):
848 # length byte + string chars + zero byte 849 return len(self._value) + 2
850
851 - def get_hash(self, data=None):
852 return self.get_value()
853
854 - def read(self, stream, data):
855 n, = struct.unpack(data._byte_order + 'B', 856 stream.read(1)) 857 self._value = stream.read(n).rstrip('\x00'.encode("ascii"))
858
859 - def write(self, stream, data):
860 stream.write(struct.pack(data._byte_order + 'B', 861 len(self._value)+1)) 862 stream.write(self._value) 863 stream.write('\x00'.encode("ascii"))
864
class string(SizedString):
    """A nif string: stored inline (length-prefixed) in old files, and
    as an index into the header's string palette from 20.1.0.3 on."""
    _has_strings = True

    def get_size(self, data=None):
        ver = data.version if data else -1
        # new files store only a 4-byte index; old files store the
        # 4-byte length followed by the characters
        return 4 if ver >= 0x14010003 else 4 + len(self._value)

    def read(self, stream, data):
        idx, = struct.unpack(data._byte_order + 'i', stream.read(4))
        if data.version < 0x14010003:
            # inline string: idx is the character count
            if idx > 10000:
                raise ValueError('string too long (0x%08X at 0x%08X)'
                                 % (idx, stream.tell()))
            self._value = stream.read(idx)
            return
        # palette string: idx indexes _string_list; -1 means empty
        if idx == -1:
            self._value = ''.encode("ascii")
            return
        try:
            self._value = data._string_list[idx]
        except IndexError:
            raise ValueError('string index too large (%i)'%idx)

    def write(self, stream, data):
        if data.version < 0x14010003:
            stream.write(struct.pack(data._byte_order + 'I',
                                     len(self._value)))
            stream.write(self._value)
            return
        if not self._value:
            # empty string is written as index -1
            stream.write(
                struct.pack(data._byte_order + 'i', -1))
            return
        try:
            stream.write(struct.pack(
                data._byte_order + 'i',
                data._string_list.index(self._value)))
        except ValueError:
            raise ValueError(
                "string '%s' not in string list" % self._value)

    def get_strings(self, data):
        # expose our value so it can be collected into the palette
        return [self._value] if self._value else []

    def get_hash(self, data=None):
        return self.get_value()
917 918 # other types with internal implementation 919
class FilePath(string):
    """A file path string."""

    def get_hash(self, data=None):
        """Returns a case insensitive hash value."""
        # paths differing only in case should hash equal
        return self.get_value().lower()
925
926 - class ByteArray(BasicBase):
927 """Array (list) of bytes. Implemented as basic type to speed up reading 928 and also to prevent data to be dumped by __str__."""
929 - def __init__(self, **kwargs):
930 BasicBase.__init__(self, **kwargs) 931 self.set_value("".encode()) # b'' for > py25
932
933 - def get_value(self):
934 return self._value
935
936 - def set_value(self, value):
938
939 - def get_size(self, data=None):
940 return len(self._value) + 4
941
942 - def get_hash(self, data=None):
943 return self._value.__hash__()
944
945 - def read(self, stream, data):
946 size, = struct.unpack(data._byte_order + 'I', 947 stream.read(4)) 948 self._value = stream.read(size)
949
950 - def write(self, stream, data):
951 stream.write(struct.pack(data._byte_order + 'I', 952 len(self._value))) 953 stream.write(self._value)
954
955 - def __str__(self):
956 return "< %i Bytes >" % len(self._value)
957
class ByteMatrix(BasicBase):
    """Matrix of bytes, kept as a basic type so reading is fast and
    __str__ does not dump the raw data."""

    def __init__(self, **kwargs):
        BasicBase.__init__(self, **kwargs)
        self.set_value([])

    def get_value(self):
        return self._value

    def set_value(self, value):
        assert(isinstance(value, list))
        if value:
            # all rows must have the same width as the first
            width = len(value[0])
            for row in value:
                # TODO fix this for py3k
                #assert(isinstance(row, basestring))
                assert(len(row) == width)
        # stored as a list of byte strings of equal length
        self._value = value

    def get_size(self, data=None):
        # two 4-byte dimension fields plus the payload
        if not self._value:
            return 8
        return len(self._value) * len(self._value[0]) + 8

    def get_hash(self, data=None):
        return tuple(row.__hash__() for row in self._value)

    def read(self, stream, data):
        # size1 is the row width, size2 the number of rows
        size1, = struct.unpack(data._byte_order + 'I',
                               stream.read(4))
        size2, = struct.unpack(data._byte_order + 'I',
                               stream.read(4))
        self._value = [stream.read(size1) for _row in xrange(size2)]

    def write(self, stream, data):
        width = len(self._value[0]) if self._value else 0
        stream.write(struct.pack(data._byte_order + 'I', width))
        stream.write(struct.pack(data._byte_order + 'I',
                                 len(self._value)))
        for row in self._value:
            stream.write(row)

    def __str__(self):
        width = len(self._value[0]) if self._value else 0
        return "< %ix%i Bytes >" % (len(self._value), width)
@classmethod
def vercondFilter(cls, expression):
    """Translate a version-condition token into either the matching
    attribute name or a version integer.

    :raise ``ValueError``: If the expression is not a known token and
        not a parsable version string.
    """
    named_tokens = {
        "Version": "version",
        "User Version": "user_version",
        "User Version 2": "user_version2",
    }
    if expression in named_tokens:
        return named_tokens[expression]
    ver = cls.version_number(expression)
    if ver < 0:
        # not supported?
        raise ValueError(
            "cannot recognize version expression '%s'" % expression)
    return ver
1027 1028 @staticmethod
1029 - def version_number(version_str):
1030 """Converts version string into an integer. 1031 1032 :param version_str: The version string. 1033 :type version_str: str 1034 :return: A version integer. 1035 1036 >>> hex(NifFormat.version_number('3.14.15.29')) 1037 '0x30e0f1d' 1038 >>> hex(NifFormat.version_number('1.2')) 1039 '0x1020000' 1040 >>> hex(NifFormat.version_number('3.03')) 1041 '0x3000300' 1042 >>> hex(NifFormat.version_number('NS')) 1043 '0xa010000' 1044 """ 1045 1046 # 3.03 case is special 1047 if version_str == '3.03': 1048 return 0x03000300 1049 1050 # NS (neosteam) case is special 1051 if version_str == 'NS': 1052 return 0x0A010000 1053 1054 try: 1055 ver_list = [int(x) for x in version_str.split('.')] 1056 except ValueError: 1057 return -1 # version not supported (i.e. version_str '10.0.1.3a' would trigger this) 1058 if len(ver_list) > 4 or len(ver_list) < 1: 1059 return -1 # version not supported 1060 for ver_digit in ver_list: 1061 if (ver_digit | 0xff) > 0xff: 1062 return -1 # version not supported 1063 while len(ver_list) < 4: ver_list.append(0) 1064 return (ver_list[0] << 24) + (ver_list[1] << 16) + (ver_list[2] << 8) + ver_list[3]
1065 1066 # exceptions
class NifError(Exception):
    """Standard nif exception class."""
1070
class Data(pyffi.object_models.FileFormat.Data):
    """A class to contain the actual nif data.

    Note that L{header} and L{blocks} are not automatically kept
    in sync with the rest of the nif data, but they are
    resynchronized when calling L{write}.

    :ivar version: The nif version.
    :type version: ``int``
    :ivar user_version: The nif user version.
    :type user_version: ``int``
    :ivar user_version2: The nif user version 2.
    :type user_version2: ``int``
    :ivar roots: List of root blocks.
    :type roots: ``list`` of L{NifFormat.NiObject}
    :ivar header: The nif header.
    :type header: L{NifFormat.Header}
    :ivar blocks: List of blocks.
    :type blocks: ``list`` of L{NifFormat.NiObject}
    :ivar modification: Neo Steam ("neosteam") or Ndoors ("ndoors") or Joymaster Interactive Howling Sword ("jmihs1") or Laxe Lore ("laxelore") style nif?
    :type modification: ``str``
    """

    # transient state used by read()/write(); None while no
    # read/write is in progress
    _link_stack = None
    _block_dct = None
    _string_list = None
    _block_index_dct = None
class VersionUInt(pyffi.object_models.common.UInt):
    """Unsigned integer that may also be unset (``None``) and that
    renders itself in hexadecimal."""

    def set_value(self, value):
        # None means "not set"; anything else goes through the normal
        # UInt validation
        if value is None:
            self._value = None
        else:
            pyffi.object_models.common.UInt.set_value(self, value)

    def __str__(self):
        if self._value is None:
            return "None"
        return "0x%08X" % self.get_value()

    def get_detail_display(self):
        # detail view shows the same hexadecimal representation
        return self.__str__()
1114
def __init__(self, version=0x04000002, user_version=0, user_version2=0):
    """Initialize nif data. By default, this creates an empty
    nif document of the given version and user version.

    :param version: The version.
    :type version: ``int``
    :param user_version: The user version.
    :type user_version: ``int``
    :param user_version2: The user version 2.
    :type user_version2: ``int``
    """
    # the version numbers are stored outside the header structure
    self._version_value_ = self.VersionUInt()
    self._version_value_.set_value(version)
    self._user_version_value_ = self.VersionUInt()
    self._user_version_value_.set_value(user_version)
    self._user_version_2_value_ = self.VersionUInt()
    self._user_version_2_value_.set_value(user_version2)
    # create new header
    self.header = NifFormat.Header()
    # empty list of root blocks (this encodes the footer)
    self.roots = []
    # empty list of blocks
    self.blocks = []
    # not a neosteam or ndoors nif
    self.modification = None
1139
# accessors backing the version/user_version/user_version2 properties;
# the values live in VersionUInt wrappers so they display in hex
def _getVersion(self):
    return self._version_value_.get_value()
def _setVersion(self, value):
    self._version_value_.set_value(value)

def _getUserVersion(self):
    return self._user_version_value_.get_value()
def _setUserVersion(self, value):
    self._user_version_value_.set_value(value)

def _getUserVersion2(self):
    return self._user_version_2_value_.get_value()
def _setUserVersion2(self, value):
    self._user_version_2_value_.set_value(value)

version = property(_getVersion, _setVersion)
user_version = property(_getUserVersion, _setUserVersion)
user_version2 = property(_getUserVersion2, _setUserVersion2)

# new functions
def inspect_version_only(self, stream):
    """This function checks the version only, and is faster
    than the usual inspect function (which reads the full
    header). Sets the L{version} and L{user_version} instance
    variables if the stream contains a valid nif file.

    Call this function if you simply wish to check that a file is
    a nif file without having to parse even the header.

    :raise ``ValueError``: If the stream does not contain a nif file.
    :param stream: The stream from which to read.
    :type stream: ``file``
    """
    pos = stream.tell()
    try:
        # the header string fits well within the first 64 bytes
        s = stream.readline(64).rstrip()
    finally:
        stream.seek(pos)
    self.modification = None
    # the header string identifies both the vendor and the version
    if s.startswith("NetImmerse File Format, Version ".encode("ascii")):
        version_str = s[32:].decode("ascii")
    elif s.startswith("Gamebryo File Format, Version ".encode("ascii")):
        version_str = s[30:].decode("ascii")
    elif s.startswith("NS".encode("ascii")):
        # neosteam
        version_str = "NS"
        self.modification = "neosteam"
    elif s.startswith("NDSNIF....@....@...., Version ".encode("ascii")):
        version_str = s[30:].decode("ascii")
        self.modification = "ndoors"
    elif s.startswith("Joymaster HS1 Object Format - (JMI), Version ".encode("ascii")):
        version_str = s[45:].decode("ascii")
        self.modification = "jmihs1"
    else:
        raise ValueError("Not a nif file.")
    try:
        ver = NifFormat.version_number(version_str)
    except:
        raise ValueError("Nif version %s not supported." % version_str)
    if not ver in NifFormat.versions.values():
        raise ValueError("Nif version %s not supported."
                         % version_str)
    # check version integer and user version
    userver = 0
    userver2 = 0
    if ver >= 0x0303000D:
        ver_int = None
        try:
            # skip the header string; the binary version integer follows
            stream.readline(64)
            ver_int, = struct.unpack('<I', stream.read(4))
            # special case for Laxe Lore
            if ver_int == 0x5A000004 and ver == 0x14000004:
                self.modification = "laxelore"
            # neosteam and ndoors have a special version integer
            elif (not self.modification) or self.modification == "jmihs1":
                if ver_int != ver:
                    raise ValueError(
                        "Corrupted nif file: header version string %s"
                        " does not correspond with header version field"
                        " 0x%08X." % (version_str, ver_int))
            elif self.modification == "neosteam":
                if ver_int != 0x08F35232:
                    raise ValueError(
                        "Corrupted nif file: invalid NeoSteam version.")
            elif self.modification == "ndoors":
                if ver_int != 0x73615F67:
                    raise ValueError(
                        "Corrupted nif file: invalid Ndoors version.")
            if ver >= 0x14000004:
                endian_type, = struct.unpack('<B', stream.read(1))
                if endian_type == 0:
                    # big endian!
                    self._byte_order = '>'
            # NOTE(review): the fields below are unpacked with '<'
            # (little endian) even when the endian byte above selected
            # big endian — confirm big-endian nifs are handled correctly
            if ver >= 0x0A010000:
                userver, = struct.unpack('<I', stream.read(4))
                if userver in (10, 11):
                    stream.read(4) # number of blocks
                    userver2, = struct.unpack('<I', stream.read(4))
        finally:
            # always restore the caller's stream position
            stream.seek(pos)
    self.version = ver
    self.user_version = userver
    self.user_version2 = userver2
1243 1244 # GlobalNode 1245
def get_global_child_nodes(self, edge_filter=EdgeFilter()):
    """Yield every root block as a global child node."""
    return (node for node in self.roots)
1248 1249 # DetailNode 1250
def replace_global_node(self, oldbranch, newbranch,
                        edge_filter=EdgeFilter()):
    """Replace oldbranch with newbranch: swap it in place when it is a
    root, otherwise recurse into each root."""
    for position, node in enumerate(self.roots):
        if node is oldbranch:
            self.roots[position] = newbranch
        else:
            node.replace_global_node(oldbranch, newbranch,
                                     edge_filter=edge_filter)
1259
def get_detail_child_nodes(self, edge_filter=EdgeFilter()):
    """Yield the three version fields and the header as detail children."""
    for node in (self._version_value_,
                 self._user_version_value_,
                 self._user_version_2_value_,
                 self.header):
        yield node
1265
def get_detail_child_names(self, edge_filter=EdgeFilter()):
    """Yield the display names matching get_detail_child_nodes order."""
    for name in ("Version", "User Version", "User Version 2", "Header"):
        yield name
1271 1272 # overriding pyffi.object_models.FileFormat.Data methods 1273
def inspect(self, stream):
    """Quickly checks whether the stream appears to contain
    nif data, and read the nif header. Resets stream to original position.

    Call this function if you only need to inspect the header of the nif.

    :param stream: The file to inspect.
    :type stream: ``file``
    """
    pos = stream.tell()
    try:
        # sets version fields and validates the magic string
        self.inspect_version_only(stream)
        # reads the full header (the part inspect_version_only skips)
        self.header.read(stream, data=self)
    finally:
        # always restore the caller's stream position
        stream.seek(pos)
1289
def read(self, stream):
    """Read a nif file. Does not reset stream position.

    Populates L{header}, L{blocks}, and L{roots}, and resolves all
    block links via ``fix_links``.

    :raise ``NifFormat.NifError``: On corrupt block tags, duplicate
        block indices, or unresolved links.
    :raise ``ValueError``: On unknown block types or non-nif streams.
    :param stream: The stream from which to read.
    :type stream: ``file``
    """
    logger = logging.getLogger("pyffi.nif.data")
    # read header
    logger.debug("Reading header at 0x%08X" % stream.tell())
    self.inspect_version_only(stream)
    logger.debug("Version 0x%08X" % self.version)
    self.header.read(stream, data=self)

    # list of root blocks
    # for versions < 3.3.0.13 this list is updated through the
    # "Top Level Object" string while reading the blocks
    # for more recent versions, this list is updated at the end when the
    # footer is read
    self.roots = []

    # read the blocks
    self._link_stack = [] # list of indices, as they are added to the stack
    self._string_list = [s for s in self.header.strings]
    self._block_dct = {} # maps block index to actual block
    self.blocks = [] # records all blocks as read from file in order
    block_num = 0 # the current block numner

    while True:
        if self.version < 0x0303000D:
            # check if this is a 'Top Level Object'
            pos = stream.tell()
            top_level_str = NifFormat.SizedString()
            top_level_str.read(stream, data=self)
            top_level_str = str(top_level_str)
            if top_level_str == "Top Level Object":
                is_root = True
            else:
                is_root = False
                # not a root marker: rewind so the block is read normally
                stream.seek(pos)
        else:
            # signal as no root for now, roots are added when the footer
            # is read
            is_root = False

        # get block name
        if self.version >= 0x05000001:
            # note the 0xfff mask: required for the NiPhysX blocks
            block_type = self.header.block_types[
                self.header.block_type_index[block_num] & 0xfff]
            block_type = block_type.decode("ascii")
            # handle data stream classes
            # (usage/access are encoded in the type string, \x01-separated;
            # they are applied to the block after it has been read below)
            if block_type.startswith("NiDataStream\x01"):
                block_type, data_stream_usage, data_stream_access = block_type.split("\x01")
                data_stream_usage = int(data_stream_usage)
                data_stream_access = int(data_stream_access)
            # read dummy integer
            # bhk blocks are *not* preceeded by a dummy
            if self.version <= 0x0A01006A and not block_type.startswith("bhk"):
                dummy, = struct.unpack(self._byte_order + 'I',
                                       stream.read(4))
                if dummy != 0:
                    raise NifFormat.NifError(
                        'non-zero block tag 0x%08X at 0x%08X)'
                        %(dummy, stream.tell()))
        else:
            # old versions store the block type as a sized string
            block_type = NifFormat.SizedString()
            block_type.read(stream, self)
            block_type = block_type.get_value().decode("ascii")
        # get the block index
        if self.version >= 0x0303000D:
            # for these versions the block index is simply the block number
            block_index = block_num
        else:
            # earlier versions
            # the number of blocks is not in the header
            # and a special block type string marks the end of the file
            if block_type == "End Of File": break
            # read the block index, which is probably the memory
            # location of the object when it was written to
            # memory
            else:
                block_index, = struct.unpack(
                    self._byte_order + 'I', stream.read(4))
                if block_index in self._block_dct:
                    raise NifFormat.NifError(
                        'duplicate block index (0x%08X at 0x%08X)'
                        %(block_index, stream.tell()))
        # create the block
        try:
            block = getattr(NifFormat, block_type)()
        except AttributeError:
            raise ValueError(
                "Unknown block type '%s'." % block_type)
        logger.debug("Reading %s block at 0x%08X"
                     % (block_type, stream.tell()))
        # read the block
        try:
            block.read(stream, self)
        except:
            # log which block failed, then re-raise untouched
            logger.exception("Reading %s failed" % block.__class__)
            #logger.error("link stack: %s" % self._link_stack)
            #logger.error("block that failed:")
            #logger.error("%s" % block)
            raise
        # complete NiDataStream data
        if block_type == "NiDataStream":
            block.usage = data_stream_usage
            block.access.from_int(data_stream_access, self)
        # store block index
        self._block_dct[block_index] = block
        self.blocks.append(block)
        # check block size
        if self.version >= 0x14020007:
            logger.debug("Checking block size")
            calculated_size = block.get_size(data=self)
            if calculated_size != self.header.block_size[block_num]:
                extra_size = self.header.block_size[block_num] - calculated_size
                logger.error(
                    "Block size check failed: corrupt nif file "
                    "or bad nif.xml?")
                logger.error("Skipping %i bytes in %s"
                             % (extra_size, block.__class__.__name__))
                # skip bytes that were missed
                stream.seek(extra_size, 1)
        # add block to roots if flagged as such
        if is_root:
            self.roots.append(block)
        # check if we are done
        block_num += 1
        if self.version >= 0x0303000D:
            if block_num >= self.header.num_blocks:
                break

    # read footer
    ftr = NifFormat.Footer()
    ftr.read(stream, self)

    # check if we are at the end of the file
    if stream.read(1):
        logger.error(
            'End of file not reached: corrupt nif file?')

    # fix links in blocks and footer (header has no links)
    for block in self.blocks:
        block.fix_links(self)
    ftr.fix_links(self)
    # the link stack should be empty now
    if self._link_stack:
        raise NifFormat.NifError('not all links have been popped from the stack (bug?)')
    # add root objects in footer to roots list
    if self.version >= 0x0303000D:
        for root in ftr.roots:
            self.roots.append(root)
def write(self, stream):
    """Write a nif file. The L{header} and the L{blocks} are recalculated
    from the tree at L{roots} (e.g. list of block types, number of blocks,
    list of block types, list of strings, list of block sizes etc.).

    :param stream: The stream to which to write.
    :type stream: file
    """
    logger = logging.getLogger("pyffi.nif.data")
    # set up index and type dictionary
    self.blocks = [] # list of all blocks to be written
    self._block_index_dct = {} # maps block to block index
    block_type_list = [] # list of all block type strings
    block_type_dct = {} # maps block to block type string index
    self._string_list = []
    for root in self.roots:
        self._makeBlockList(root,
                            self._block_index_dct,
                            block_type_list, block_type_dct)
        for block in root.tree():
            self._string_list.extend(
                block.get_strings(self))
    self._string_list = list(set(self._string_list)) # ensure unique elements

    self.header.user_version = self.user_version # TODO dedicated type for user_version similar to FileVersion
    # for oblivion CS; apparently this is the version of the bhk blocks
    self.header.user_version_2 = self.user_version2
    self.header.num_blocks = len(self.blocks)
    self.header.num_block_types = len(block_type_list)
    self.header.block_types.update_size()
    for i, block_type in enumerate(block_type_list):
        self.header.block_types[i] = block_type
    self.header.block_type_index.update_size()
    for i, block in enumerate(self.blocks):
        self.header.block_type_index[i] = block_type_dct[block]
    self.header.num_strings = len(self._string_list)
    if self._string_list:
        self.header.max_string_length = max([len(s) for s in self._string_list])
    else:
        self.header.max_string_length = 0
    self.header.strings.update_size()
    for i, s in enumerate(self._string_list):
        self.header.strings[i] = s
    self.header.block_size.update_size()
    for i, block in enumerate(self.blocks):
        self.header.block_size[i] = block.get_size(data=self)

    # set up footer
    ftr = NifFormat.Footer()
    ftr.num_roots = len(self.roots)
    ftr.roots.update_size()
    for i, root in enumerate(self.roots):
        ftr.roots[i] = root

    # write the file
    logger.debug("Writing header")
    self.header.write(stream, self)
    for block in self.blocks:
        # signal top level object if block is a root object
        if self.version < 0x0303000D and block in self.roots:
            s = NifFormat.SizedString()
            s.set_value("Top Level Object")
            s.write(stream, self)
        if self.version >= 0x05000001:
            if self.version <= 0x0A01006A:
                # write zero dummy separator
                # NOTE(review): read() skips the dummy for "bhk" blocks but
                # write() emits it unconditionally — confirm bhk blocks
                # cannot occur in versions <= 10.1.0.106
                stream.write('\x00\x00\x00\x00'.encode("ascii"))
        else:
            # write block type string (versions < 5.0.0.1, mirroring read)
            s = NifFormat.SizedString()
            assert(block_type_list[block_type_dct[block]]
                   == block.__class__.__name__) # debug
            s.set_value(block.__class__.__name__)
            s.write(stream, self)
        # write block index
        logger.debug("Writing %s block" % block.__class__.__name__)
        if self.version < 0x0303000D:
            stream.write(struct.pack(self._byte_order + 'i',
                                     self._block_index_dct[block]))
        # write block
        block.write(stream, self)
    if self.version < 0x0303000D:
        s = NifFormat.SizedString()
        s.set_value("End Of File")
        # bug fix: the data argument was missing here, which raised
        # TypeError when writing version < 3.3.0.13 nifs
        s.write(stream, self)
    ftr.write(stream, self)
1534
def _makeBlockList(
    self, root, block_index_dct, block_type_list, block_type_dct):
    """This is a helper function for write to set up the list of all blocks,
    the block index map, and the block type map.

    :param root: The root block, whose tree is to be added to
        the block list.
    :type root: L{NifFormat.NiObject}
    :param block_index_dct: Dictionary mapping blocks in self.blocks to
        their block index.
    :type block_index_dct: dict
    :param block_type_list: List of all block types.
    :type block_type_list: list of str
    :param block_type_dct: Dictionary mapping blocks in self.blocks to
        their block type index.
    :type block_type_dct: dict
    """

    def _blockChildBeforeParent(block):
        """Determine whether block comes before its parent or not, depending
        on the block type.

        @todo: Move to the L{NifFormat.Data} class.

        :param block: The block to test.
        :type block: L{NifFormat.NiObject}
        :return: ``True`` if child should come first, ``False`` otherwise.
        """
        return (isinstance(block, NifFormat.bhkRefObject)
                and not isinstance(block, NifFormat.bhkConstraint))

    # block already listed? if so, return
    if root in self.blocks:
        return
    # add block type to block type dictionary
    block_type = root.__class__.__name__
    # special case: NiDataStream stores part of data in block type list
    if block_type == "NiDataStream":
        block_type = ("NiDataStream\x01%i\x01%i"
                      % (root.usage, root.access.to_int(self)))
    try:
        block_type_dct[root] = block_type_list.index(block_type)
    except ValueError:
        # first occurrence of this type: append it
        block_type_dct[root] = len(block_type_list)
        block_type_list.append(block_type)

    # special case: add bhkConstraint entities before bhkConstraint
    # (these are actually links, not refs)
    if isinstance(root, NifFormat.bhkConstraint):
        for entity in root.entities:
            self._makeBlockList(
                entity, block_index_dct, block_type_list, block_type_dct)

    # add children that come before the block
    for child in root.get_refs(data=self):
        if _blockChildBeforeParent(child):
            self._makeBlockList(
                child, block_index_dct, block_type_list, block_type_dct)

    # add the block
    if self.version >= 0x0303000D:
        # recent versions index blocks by position in the file
        block_index_dct[root] = len(self.blocks)
    else:
        # old versions use an arbitrary unique integer (the object id)
        block_index_dct[root] = id(root)
    self.blocks.append(root)

    # add children that come after the block
    for child in root.get_refs(data=self):
        if not _blockChildBeforeParent(child):
            self._makeBlockList(
                child, block_index_dct, block_type_list, block_type_dct)
1606 1607 # extensions of generated structures 1608
class Footer:
    def read(self, stream, data):
        """Read footer; neosteam files carry one extra trailing zero byte."""
        StructBase.read(self, stream, data)
        if getattr(data, 'modification', None) == "neosteam":
            trailer, = struct.unpack("<B", stream.read(1))
            if trailer != 0:
                raise ValueError(
                    "Expected trailing zero byte in footer, "
                    "but got %i instead." % trailer)

    def write(self, stream, data):
        """Write footer; append the extra zero byte for neosteam files."""
        StructBase.write(self, stream, data)
        if getattr(data, 'modification', None) == "neosteam":
            stream.write("\x00".encode("ascii"))
1625 1626
class Header:
    def has_block_type(self, block_type):
        """Check if header has a particular block type.

        :raise ``ValueError``: If number of block types is zero
            (only nif versions 10.0.1.0 and up store block types
            in header).

        :param block_type: The block type.
        :type block_type: L{NifFormat.NiObject}
        :return: ``True`` if the header's list of block types has the given
            block type, or a subclass of it. ``False`` otherwise.
        :rtype: ``bool``
        """
        # check if we can check the block types at all
        if self.num_block_types == 0:
            raise ValueError("header does not store any block types")
        # quick first check, without hierarchy, using simple string comparisons
        if block_type.__name__.encode() in self.block_types:
            return True
        # slower check, using isinstance
        for raw_name in self.block_types:
            name = raw_name.decode("ascii")
            # NiDataStreams are special
            if name.startswith("NiDataStream\x01"):
                name = "NiDataStream"
            if issubclass(getattr(NifFormat, name), block_type):
                return True
        # requested block type is not in nif
        return False
1657
1658 - class Matrix33:
1659 - def as_list(self):
1660 """Return matrix as 3x3 list.""" 1661 return [ 1662 [self.m_11, self.m_12, self.m_13], 1663 [self.m_21, self.m_22, self.m_23], 1664 [self.m_31, self.m_32, self.m_33] 1665 ]
1666
1667 - def as_tuple(self):
1668 """Return matrix as 3x3 tuple.""" 1669 return ( 1670 (self.m_11, self.m_12, self.m_13), 1671 (self.m_21, self.m_22, self.m_23), 1672 (self.m_31, self.m_32, self.m_33) 1673 )
1674
1675 - def __str__(self):
1676 return ( 1677 "[ %6.3f %6.3f %6.3f ]\n" 1678 "[ %6.3f %6.3f %6.3f ]\n" 1679 "[ %6.3f %6.3f %6.3f ]\n" 1680 % (self.m_11, self.m_12, self.m_13, 1681 self.m_21, self.m_22, self.m_23, 1682 self.m_31, self.m_32, self.m_33))
1683
1684 - def set_identity(self):
1685 """Set to identity matrix.""" 1686 self.m_11 = 1.0 1687 self.m_12 = 0.0 1688 self.m_13 = 0.0 1689 self.m_21 = 0.0 1690 self.m_22 = 1.0 1691 self.m_23 = 0.0 1692 self.m_31 = 0.0 1693 self.m_32 = 0.0 1694 self.m_33 = 1.0
1695
def is_identity(self):
    """Return ``True`` if the matrix is close to identity."""
    identity = ((1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0))
    for row, expected_row in zip(self.as_tuple(), identity):
        for entry, target in zip(row, expected_row):
            if abs(entry - target) > NifFormat.EPSILON:
                return False
    return True
1710
def get_copy(self):
    """Return a copy of the matrix."""
    mat = NifFormat.Matrix33()
    for name in ("m_11", "m_12", "m_13",
                 "m_21", "m_22", "m_23",
                 "m_31", "m_32", "m_33"):
        setattr(mat, name, getattr(self, name))
    return mat
1724
def get_transpose(self):
    """Get transposed of the matrix."""
    mat = NifFormat.Matrix33()
    for row in (1, 2, 3):
        for col in (1, 2, 3):
            setattr(mat, "m_%i%i" % (row, col),
                    getattr(self, "m_%i%i" % (col, row)))
    return mat
1738
def is_scale_rotation(self):
    """Returns true if the matrix decomposes nicely into scale * rotation."""
    # NOTE: 0.01 instead of NifFormat.EPSILON to work around bad nif files

    # self * self^T equals scale^2 * identity for a scale-rotation
    # matrix, since rotation * rotation^T is the identity
    gram = self * self.get_transpose()

    # off diagonal elements should be zero
    off_diagonal = (abs(gram.m_12) + abs(gram.m_13)
                    + abs(gram.m_21) + abs(gram.m_23)
                    + abs(gram.m_31) + abs(gram.m_32))
    if off_diagonal > 0.01:
        return False

    # diagonal elements should be equal (to scale^2)
    return abs(gram.m_11 - gram.m_22) + abs(gram.m_22 - gram.m_33) <= 0.01
1762
def is_rotation(self):
    """Returns ``True`` if the matrix is a rotation matrix
    (a member of SO(3))."""
    # NOTE: 0.01 instead of NifFormat.EPSILON to work around bad nif files
    return (self.is_scale_rotation()
            and abs(self.get_determinant() - 1.0) <= 0.01)
1773
1774 - def get_determinant(self):
1775 """Return determinant.""" 1776 return (self.m_11*self.m_22*self.m_33 1777 +self.m_12*self.m_23*self.m_31 1778 +self.m_13*self.m_21*self.m_32 1779 -self.m_31*self.m_22*self.m_13 1780 -self.m_21*self.m_12*self.m_33 1781 -self.m_11*self.m_32*self.m_23)
1782
1783 - def get_scale(self):
1784 """Gets the scale (assuming is_scale_rotation is true!).""" 1785 scale = self.get_determinant() 1786 if scale < 0: 1787 return -((-scale)**(1.0/3.0)) 1788 else: 1789 return scale**(1.0/3.0)
1790
def get_scale_rotation(self):
    """Decompose the matrix into scale and rotation, where scale is a float
    and rotation is a C{Matrix33}. Returns a pair (scale, rotation)."""
    scale = self.get_scale()
    if abs(scale) < NifFormat.EPSILON:
        raise ZeroDivisionError('scale is zero, unable to obtain rotation')
    rotation = self.get_copy() / scale
    return (scale, rotation)
1800
def set_scale_rotation(self, scale, rotation):
    """Compose the matrix as the product of scale * rotation."""
    # NOTE: 'long' is python 2 only (py3 builds go through 2to3)
    if not isinstance(scale, (float, int, long)):
        raise TypeError('scale must be float')
    if not isinstance(rotation, NifFormat.Matrix33):
        raise TypeError('rotation must be Matrix33')

    if not rotation.is_rotation():
        raise ValueError('rotation must be rotation matrix')

    for name in ("m_11", "m_12", "m_13",
                 "m_21", "m_22", "m_23",
                 "m_31", "m_32", "m_33"):
        setattr(self, name, getattr(rotation, name) * scale)
1820
def get_scale_quat(self):
    """Decompose matrix into scale and quaternion.

    :return: A (scale, quaternion) pair.
    """
    scale, rot = self.get_scale_rotation()
    quat = NifFormat.Quaternion()
    trace = 1.0 + rot.m_11 + rot.m_22 + rot.m_33

    # branch on the largest of trace/diagonal entries, presumably to
    # keep the divisor s away from zero — standard practice for
    # matrix-to-quaternion conversion
    if trace > NifFormat.EPSILON:
        s = (trace ** 0.5) * 2
        quat.x = -( rot.m_32 - rot.m_23 ) / s
        quat.y = -( rot.m_13 - rot.m_31 ) / s
        quat.z = -( rot.m_21 - rot.m_12 ) / s
        quat.w = 0.25 * s
    elif rot.m_11 > max((rot.m_22, rot.m_33)):
        s = (( 1.0 + rot.m_11 - rot.m_22 - rot.m_33 ) ** 0.5) * 2
        quat.x = 0.25 * s
        quat.y = (rot.m_21 + rot.m_12 ) / s
        quat.z = (rot.m_13 + rot.m_31 ) / s
        quat.w = -(rot.m_32 - rot.m_23 ) / s
    elif rot.m_22 > rot.m_33:
        s = (( 1.0 + rot.m_22 - rot.m_11 - rot.m_33 ) ** 0.5) * 2
        quat.x = (rot.m_21 + rot.m_12 ) / s
        quat.y = 0.25 * s
        quat.z = (rot.m_32 + rot.m_23 ) / s
        quat.w = -(rot.m_13 - rot.m_31 ) / s
    else:
        s = (( 1.0 + rot.m_33 - rot.m_11 - rot.m_22 ) ** 0.5) * 2
        quat.x = (rot.m_13 + rot.m_31 ) / s
        quat.y = (rot.m_32 + rot.m_23 ) / s
        quat.z = 0.25 * s
        quat.w = -(rot.m_21 - rot.m_12 ) / s

    return scale, quat
1853 1854
def get_inverse(self):
    """Get inverse (assuming is_scale_rotation is true!)."""
    # transpose inverts the rotation but keeps the scale;
    # dividing by scale^2 inverts the scale as well
    scale_squared = self.m_11 ** 2 + self.m_12 ** 2 + self.m_13 ** 2
    return self.get_transpose() / scale_squared
1860
def __mul__(self, rhs):
    """Multiply by a scalar or by another Matrix33.

    Vector3 right operands are rejected on purpose: use vector*matrix.
    NOTE(review): 'long' is python 2 only (py3 builds go through 2to3).
    """
    if isinstance(rhs, (float, int, long)):
        # scalar: scale every entry
        mat = NifFormat.Matrix33()
        mat.m_11 = self.m_11 * rhs
        mat.m_12 = self.m_12 * rhs
        mat.m_13 = self.m_13 * rhs
        mat.m_21 = self.m_21 * rhs
        mat.m_22 = self.m_22 * rhs
        mat.m_23 = self.m_23 * rhs
        mat.m_31 = self.m_31 * rhs
        mat.m_32 = self.m_32 * rhs
        mat.m_33 = self.m_33 * rhs
        return mat
    elif isinstance(rhs, NifFormat.Vector3):
        raise TypeError(
            "matrix*vector not supported; "
            "please use left multiplication (vector*matrix)")
    elif isinstance(rhs, NifFormat.Matrix33):
        # standard row-by-column matrix product
        mat = NifFormat.Matrix33()
        mat.m_11 = self.m_11 * rhs.m_11 + self.m_12 * rhs.m_21 + self.m_13 * rhs.m_31
        mat.m_12 = self.m_11 * rhs.m_12 + self.m_12 * rhs.m_22 + self.m_13 * rhs.m_32
        mat.m_13 = self.m_11 * rhs.m_13 + self.m_12 * rhs.m_23 + self.m_13 * rhs.m_33
        mat.m_21 = self.m_21 * rhs.m_11 + self.m_22 * rhs.m_21 + self.m_23 * rhs.m_31
        mat.m_22 = self.m_21 * rhs.m_12 + self.m_22 * rhs.m_22 + self.m_23 * rhs.m_32
        mat.m_23 = self.m_21 * rhs.m_13 + self.m_22 * rhs.m_23 + self.m_23 * rhs.m_33
        mat.m_31 = self.m_31 * rhs.m_11 + self.m_32 * rhs.m_21 + self.m_33 * rhs.m_31
        mat.m_32 = self.m_31 * rhs.m_12 + self.m_32 * rhs.m_22 + self.m_33 * rhs.m_32
        mat.m_33 = self.m_31 * rhs.m_13 + self.m_32 * rhs.m_23 + self.m_33 * rhs.m_33
        return mat
    else:
        raise TypeError(
            "do not know how to multiply Matrix33 with %s"%rhs.__class__)
1893
def __div__(self, rhs):
    """Divide every entry by a scalar."""
    # NOTE: 'long' is python 2 only (py3 builds go through 2to3)
    if isinstance(rhs, (float, int, long)):
        mat = NifFormat.Matrix33()
        for name in ("m_11", "m_12", "m_13",
                     "m_21", "m_22", "m_23",
                     "m_31", "m_32", "m_33"):
            setattr(mat, name, getattr(self, name) / rhs)
        return mat
    raise TypeError(
        "do not know how to divide Matrix33 by %s"%rhs.__class__)

# py3k
__truediv__ = __div__
def __rmul__(self, lhs):
    """Scalar * matrix; delegates to __mul__ since scaling commutes."""
    if not isinstance(lhs, (float, int, long)):
        raise TypeError(
            "do not know how to multiply %s with Matrix33"%lhs.__class__)
    return self * lhs  # commutes
1920
def __eq__(self, mat):
    """Approximate entry-wise equality, within NifFormat.EPSILON."""
    if not isinstance(mat, NifFormat.Matrix33):
        raise TypeError(
            "do not know how to compare Matrix33 and %s"%mat.__class__)
    # equal iff no entry differs by more than EPSILON
    return not any(
        abs(a - b) > NifFormat.EPSILON
        for row_a, row_b in zip(self.as_list(), mat.as_list())
        for a, b in zip(row_a, row_b))
1936
def __ne__(self, mat):
    """Negation of the approximate equality test."""
    return not (self == mat)
1939
def __sub__(self, x):
    """Entry-wise subtraction of another Matrix33 or a scalar."""
    if isinstance(x, (NifFormat.Matrix33)):
        diff = NifFormat.Matrix33()
        for i in (1, 2, 3):
            for j in (1, 2, 3):
                name = "m_%i%i" % (i, j)
                setattr(diff, name, getattr(self, name) - getattr(x, name))
        return diff
    elif isinstance(x, (int, long, float)):
        diff = NifFormat.Matrix33()
        for i in (1, 2, 3):
            for j in (1, 2, 3):
                name = "m_%i%i" % (i, j)
                setattr(diff, name, getattr(self, name) - x)
        return diff
    else:
        raise TypeError("do not know how to substract Matrix33 and %s"
                        % x.__class__)
1968
def sup_norm(self):
    """Calculate supremum norm of matrix (maximum absolute value of all
    entries)."""
    return max(abs(entry)
               for row in self.as_list()
               for entry in row)
1974
class Vector3:
    """A 3-float vector with conversion helpers, Euclidean norm, and
    arithmetic operators (scalar ops, dot product, cross product, and
    left multiplication by Matrix33/Matrix44)."""

    def as_list(self):
        """Return the components as a list [x, y, z]."""
        return list(self.as_tuple())

    def as_tuple(self):
        """Return the components as a tuple (x, y, z)."""
        return (self.x, self.y, self.z)

    def norm(self):
        """Return the Euclidean length of the vector."""
        return sum(comp * comp for comp in self.as_tuple()) ** 0.5

    def normalize(self, ignore_error=False):
        """Scale the vector in place to unit length.

        Raises ZeroDivisionError for a (near-)zero vector, unless
        ignore_error is True, in which case the vector is left untouched.
        """
        length = self.norm()
        if length < NifFormat.EPSILON:
            if ignore_error:
                return
            raise ZeroDivisionError('cannot normalize vector %s'%self)
        self.x /= length
        self.y /= length
        self.z /= length

    def normalized(self, ignore_error=False):
        """Return a unit-length copy of this vector."""
        unit = self.get_copy()
        unit.normalize(ignore_error=ignore_error)
        return unit

    def get_copy(self):
        """Return a new Vector3 with the same components."""
        dup = NifFormat.Vector3()
        dup.x, dup.y, dup.z = self.x, self.y, self.z
        return dup

    def __str__(self):
        return "[ %6.3f %6.3f %6.3f ]"%(self.x, self.y, self.z)

    def __mul__(self, x):
        """Scalar multiple, dot product (Vector3 argument), or
        row-vector times matrix (Matrix33/Matrix44 argument)."""
        if isinstance(x, (float, int, long)):
            scaled = NifFormat.Vector3()
            scaled.x, scaled.y, scaled.z = self.x * x, self.y * x, self.z * x
            return scaled
        elif isinstance(x, NifFormat.Vector3):
            # dot product
            return self.x * x.x + self.y * x.y + self.z * x.z
        elif isinstance(x, NifFormat.Matrix33):
            # left multiplication: row vector times matrix
            rotated = NifFormat.Vector3()
            rotated.x = self.x * x.m_11 + self.y * x.m_21 + self.z * x.m_31
            rotated.y = self.x * x.m_12 + self.y * x.m_22 + self.z * x.m_32
            rotated.z = self.x * x.m_13 + self.y * x.m_23 + self.z * x.m_33
            return rotated
        elif isinstance(x, NifFormat.Matrix44):
            # apply rotation part, then translation
            return self * x.get_matrix_33() + x.get_translation()
        else:
            raise TypeError("do not know how to multiply Vector3 with %s"%x.__class__)

    def __rmul__(self, x):
        if not isinstance(x, (float, int, long)):
            raise TypeError("do not know how to multiply %s and Vector3"%x.__class__)
        scaled = NifFormat.Vector3()
        scaled.x, scaled.y, scaled.z = x * self.x, x * self.y, x * self.z
        return scaled

    def __div__(self, x):
        if not isinstance(x, (float, int, long)):
            raise TypeError("do not know how to divide Vector3 and %s"%x.__class__)
        quot = NifFormat.Vector3()
        quot.x, quot.y, quot.z = self.x / x, self.y / x, self.z / x
        return quot

    # py3k
    __truediv__ = __div__

    def __add__(self, x):
        if isinstance(x, (float, int, long)):
            out = NifFormat.Vector3()
            out.x, out.y, out.z = self.x + x, self.y + x, self.z + x
            return out
        elif isinstance(x, NifFormat.Vector3):
            out = NifFormat.Vector3()
            out.x, out.y, out.z = self.x + x.x, self.y + x.y, self.z + x.z
            return out
        else:
            raise TypeError("do not know how to add Vector3 and %s"%x.__class__)

    def __radd__(self, x):
        if not isinstance(x, (float, int, long)):
            raise TypeError("do not know how to add %s and Vector3"%x.__class__)
        out = NifFormat.Vector3()
        out.x, out.y, out.z = x + self.x, x + self.y, x + self.z
        return out

    def __sub__(self, x):
        if isinstance(x, (float, int, long)):
            out = NifFormat.Vector3()
            out.x, out.y, out.z = self.x - x, self.y - x, self.z - x
            return out
        elif isinstance(x, NifFormat.Vector3):
            out = NifFormat.Vector3()
            out.x, out.y, out.z = self.x - x.x, self.y - x.y, self.z - x.z
            return out
        else:
            raise TypeError("do not know how to substract Vector3 and %s"%x.__class__)

    def __rsub__(self, x):
        if not isinstance(x, (float, int, long)):
            raise TypeError("do not know how to substract %s and Vector3"%x.__class__)
        out = NifFormat.Vector3()
        out.x, out.y, out.z = x - self.x, x - self.y, x - self.z
        return out

    def __neg__(self):
        out = NifFormat.Vector3()
        out.x, out.y, out.z = -self.x, -self.y, -self.z
        return out

    # cross product
    def crossproduct(self, x):
        """Return the cross product of this vector with x."""
        if not isinstance(x, NifFormat.Vector3):
            raise TypeError("do not know how to calculate crossproduct of Vector3 and %s"%x.__class__)
        out = NifFormat.Vector3()
        out.x = self.y*x.z - self.z*x.y
        out.y = self.z*x.x - self.x*x.z
        out.z = self.x*x.y - self.y*x.x
        return out

    def __eq__(self, x):
        """Approximate component-wise equality, within NifFormat.EPSILON.

        Comparing with None returns False instead of raising."""
        if x is None:
            return False
        if not isinstance(x, NifFormat.Vector3):
            raise TypeError("do not know how to compare Vector3 and %s"%x.__class__)
        return not any(
            abs(a - b) > NifFormat.EPSILON
            for a, b in zip(self.as_tuple(), x.as_tuple()))

    def __ne__(self, x):
        return not (self == x)
2136
class Vector4:
    """A 4-float vector with conversion helpers and approximate equality.

    >>> from pyffi.formats.nif import NifFormat
    >>> vec = NifFormat.Vector4()
    >>> vec.x = 1.0
    >>> vec.y = 2.0
    >>> vec.z = 3.0
    >>> vec.w = 4.0
    >>> print(vec)
    [  1.000  2.000  3.000  4.000 ]
    >>> vec.as_list()
    [1.0, 2.0, 3.0, 4.0]
    >>> vec.as_tuple()
    (1.0, 2.0, 3.0, 4.0)
    >>> print(vec.get_vector_3())
    [  1.000  2.000  3.000 ]
    >>> vec2 = NifFormat.Vector4()
    >>> vec == vec2
    False
    >>> vec2.x = 1.0
    >>> vec2.y = 2.0
    >>> vec2.z = 3.0
    >>> vec2.w = 4.0
    >>> vec == vec2
    True
    """

    def as_list(self):
        """Return the components as a list [x, y, z, w]."""
        return list(self.as_tuple())

    def as_tuple(self):
        """Return the components as a tuple (x, y, z, w)."""
        return (self.x, self.y, self.z, self.w)

    def get_copy(self):
        """Return a new Vector4 with the same components."""
        dup = NifFormat.Vector4()
        dup.x, dup.y, dup.z, dup.w = self.x, self.y, self.z, self.w
        return dup

    def get_vector_3(self):
        """Return the x, y, z components as a Vector3 (w is dropped)."""
        vec = NifFormat.Vector3()
        vec.x, vec.y, vec.z = self.x, self.y, self.z
        return vec

    def __str__(self):
        return "[ %6.3f %6.3f %6.3f %6.3f ]"%(self.x, self.y, self.z, self.w)

    def __eq__(self, rhs):
        """Approximate component-wise equality, within NifFormat.EPSILON.

        Comparing with None returns False instead of raising."""
        if rhs is None:
            return False
        if not isinstance(rhs, NifFormat.Vector4):
            raise TypeError(
                "do not know how to compare Vector4 and %s" % rhs.__class__)
        return not any(
            abs(a - b) > NifFormat.EPSILON
            for a, b in zip(self.as_tuple(), rhs.as_tuple()))

    def __ne__(self, rhs):
        return not (self == rhs)
2202
class SkinPartition:
    def get_triangles(self):
        """Yield the triangles of this partition as vertex-index triples.

        Uses the triangle strips when the partition stores strips,
        otherwise the plain triangle list.
        """
        if self.num_strips:
            # strip data: triangulate first
            source = pyffi.utils.tristrip.triangulate(self.strips)
        else:
            # plain triangle list
            source = ((tri.v_1, tri.v_2, tri.v_3) for tri in self.triangles)
        for tri in source:
            yield tri

    def get_mapped_triangles(self):
        """Yield the triangles of this partition, mapped through
        vertex_map into the geometry data vertex list.
        """
        for tri in self.get_triangles():
            yield tuple(self.vertex_map[v_index] for v_index in tri)
2222
class bhkBoxShape:
    def apply_scale(self, scale):
        """Apply scale factor C{scale} on data."""
        # scale the half-extents and the minimum size
        dims = self.dimensions
        dims.x, dims.y, dims.z = (dims.x * scale,
                                  dims.y * scale,
                                  dims.z * scale)
        self.minimum_size *= scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # the dimensions describe half the size of the box in each
        # dimension, so double them to get full edge lengths
        edges = (self.dimensions.x * 2,
                 self.dimensions.y * 2,
                 self.dimensions.z * 2)
        mass, inertia = pyffi.utils.inertia.getMassInertiaBox(
            edges, density = density, solid = solid)
        return mass, (0,0,0), inertia
2240
class bhkCapsuleShape:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # apply scale on dimensions
        self.radius *= scale
        self.radius_1 *= scale
        self.radius_2 *= scale
        # scale both capsule end points
        self.first_point.x *= scale
        self.first_point.y *= scale
        self.first_point.z *= scale
        self.second_point.x *= scale
        self.second_point.y *= scale
        self.second_point.z *= scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # (assumes self.radius == self.radius_1 == self.radius_2)
        length = (self.first_point - self.second_point).norm()
        # inertia comes back expressed for a capsule aligned with the
        # z axis; it is rotated into the shape's frame below
        mass, inertia = pyffi.utils.inertia.getMassInertiaCapsule(
            radius = self.radius, length = length,
            density = density, solid = solid)
        # now fix inertia so it is expressed in the right coordinates
        # need a transform that maps (0,0,length/2) on (second - first) / 2
        # and (0,0,-length/2) on (first - second)/2
        vec1 = ((self.second_point - self.first_point) / length).as_tuple()
        # find an orthogonal vector to vec1
        # (cross vec1 with the basis vector along its smallest component,
        # which is guaranteed not to be parallel to vec1)
        index = min(enumerate(vec1), key=lambda val: abs(val[1]))[0]
        vec2 = vecCrossProduct(vec1, tuple((1 if i == index else 0)
                                           for i in xrange(3)))
        vec2 = vecscalarMul(vec2, 1/vecNorm(vec2))
        # find an orthogonal vector to vec1 and vec2
        vec3 = vecCrossProduct(vec1, vec2)
        # get transform matrix
        # NOTE(review): vecCrossProduct/matTransposed/... are presumably
        # the module-level math helpers imported at the top of this file
        transform_transposed = (vec2, vec3, vec1) # this is effectively the transposed of our transform
        transform = matTransposed(transform_transposed)
        # check the result (debug)
        assert(vecDistance(matvecMul(transform, (0,0,1)), vec1) < 0.0001)
        assert(abs(matDeterminant(transform) - 1) < 0.0001)
        # transform the inertia tensor: I' = R^T I R
        inertia = matMul(matMul(transform_transposed, inertia), transform)
        # center of mass is the midpoint between the two end points
        return (mass,
                ((self.first_point + self.second_point) * 0.5).as_tuple(),
                inertia)
2284
class bhkConstraint:
    def get_transform_a_b(self, parent):
        """Return the transform of the first entity relative to the second
        entity.

        :param parent: a nif block that is a common parent to both
            constrained entities.
        :return: the transform of entity A's node relative to entity B's
            node, i.e. T_A * T_B^-1 (both taken relative to parent).
        :raise ValueError: if the constraint does not link exactly two
            entities.
        """
        # a constraint must link exactly two rigid bodies
        if self.num_entities != 2:
            raise ValueError(
                "cannot get transform for constraint "
                "that does not have exactly 2 entities")
        # find chains from parent down to the A and B entities
        chain_a = parent.find_chain(self.entities[0])
        chain_b = parent.find_chain(self.entities[1])
        # validate the chains: each must end in
        # NiNode -> NiCollisionObject -> bhkRigidBody
        for chain in (chain_a, chain_b):
            assert(isinstance(chain[-1], NifFormat.bhkRigidBody))
            assert(isinstance(chain[-2], NifFormat.NiCollisionObject))
            assert(isinstance(chain[-3], NifFormat.NiNode))
        # return the relative transform
        return (chain_a[-3].get_transform(relative_to = parent)
                * chain_b[-3].get_transform(relative_to = parent).get_inverse())
2310
class bhkConvexVerticesShape:
    def apply_scale(self, scale):
        """Apply scale factor on data."""
        # nothing to do for an (approximately) unit scale
        if abs(scale - 1.0) < NifFormat.EPSILON:
            return
        for vert in self.vertices:
            vert.x *= scale
            vert.y *= scale
            vert.z *= scale
        # only the w component (plane distance) of each normal scales;
        # the direction components stay unchanged
        for normal in self.normals:
            normal.w *= scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # enumerate the triangles of the convex hull of the vertices,
        # then integrate mass properties over the resulting polyhedron
        hull_verts, hull_tris = pyffi.utils.quickhull.qhull3d(
            [vert.as_tuple() for vert in self.vertices])
        return pyffi.utils.inertia.get_mass_center_inertia_polyhedron(
            hull_verts, hull_tris, density = density, solid = solid)
2330
class bhkLimitedHingeConstraint:
    def apply_scale(self, scale):
        """Scale data."""
        # scale both pivot points of the hinge descriptor
        for pivot in (self.limited_hinge.pivot_a,
                      self.limited_hinge.pivot_b):
            pivot.x *= scale
            pivot.y *= scale
            pivot.z *= scale

    def update_a_b(self, parent):
        """Update the B data from the A data. The parent argument is simply a
        common parent to the entities."""
        self.limited_hinge.update_a_b(self.get_transform_a_b(parent))
2346
class bhkListShape:
    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return center of gravity and area."""
        subshapes_mci = [subshape.get_mass_center_inertia(density = density,
                                                          solid = solid)
                         for subshape in self.sub_shapes]
        # the total mass must be known before weighting the centers
        total_mass = sum(mass for mass, center, inertia in subshapes_mci)
        if total_mass == 0:
            return 0, (0, 0, 0), ((0, 0, 0), (0, 0, 0), (0, 0, 0))
        # accumulate mass-weighted centers and the inertia tensors
        total_center = (0, 0, 0)
        total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0))
        for mass, center, inertia in subshapes_mci:
            total_center = vecAdd(total_center,
                                  vecscalarMul(center, mass / total_mass))
            total_inertia = matAdd(total_inertia, inertia)
        return total_mass, total_center, total_inertia

    def add_shape(self, shape, front = False):
        """Add shape to list."""
        # nothing to do if the shape is already listed
        if shape in self.sub_shapes:
            return
        # grow the shape array by one
        num_shapes = self.num_sub_shapes
        self.num_sub_shapes = num_shapes + 1
        self.sub_shapes.update_size()
        if front:
            # shift everything one slot to the right, insert at the head
            for i in xrange(num_shapes, 0, -1):
                self.sub_shapes[i] = self.sub_shapes[i-1]
            self.sub_shapes[0] = shape
        else:
            self.sub_shapes[num_shapes] = shape
        # keep the unknown int array in sync
        self.num_unknown_ints = num_shapes + 1
        self.unknown_ints.update_size()

    def remove_shape(self, shape):
        """Remove a shape from the shape list."""
        # rebuild the shape list without the removed shape
        kept = [s for s in self.sub_shapes if s != shape]
        self.num_sub_shapes = len(kept)
        self.sub_shapes.update_size()
        for i, s in enumerate(kept):
            self.sub_shapes[i] = s
        # keep the unknown int array in sync
        self.num_unknown_ints = len(kept)
        self.unknown_ints.update_size()
2401
class bhkMalleableConstraint:
    def apply_scale(self, scale):
        """Scale data."""
        # scale the pivots of both the ragdoll and the limited hinge
        # descriptors
        for pivot in (self.ragdoll.pivot_a, self.ragdoll.pivot_b,
                      self.limited_hinge.pivot_a, self.limited_hinge.pivot_b):
            pivot.x *= scale
            pivot.y *= scale
            pivot.z *= scale

    def update_a_b(self, parent):
        """Update the B data from the A data."""
        relative = self.get_transform_a_b(parent)
        self.limited_hinge.update_a_b(relative)
        self.ragdoll.update_a_b(relative)
2424
class bhkMoppBvTreeShape:
    def get_mass_center_inertia(self, density=1, solid=True):
        """Return mass, center of gravity, and inertia tensor."""
        # delegate to the wrapped collision shape
        return self.get_shape_mass_center_inertia(
            density=density, solid=solid)

    def update_origin_scale(self):
        """Update scale and origin."""
        # axis-aligned bounding box of the packed shape's vertices
        minx = min(v.x for v in self.shape.data.vertices)
        miny = min(v.y for v in self.shape.data.vertices)
        minz = min(v.z for v in self.shape.data.vertices)
        maxx = max(v.x for v in self.shape.data.vertices)
        maxy = max(v.y for v in self.shape.data.vertices)
        maxz = max(v.z for v in self.shape.data.vertices)
        # origin is padded 0.1 below the minimum corner
        self.origin.x = minx - 0.1
        self.origin.y = miny - 0.1
        self.origin.z = minz - 0.1
        # scale so the largest padded extent maps onto the quantized range
        self.scale = (256*256*254) / (0.2+max([maxx-minx,maxy-miny,maxz-minz]))

    def update_mopp(self):
        """Update the MOPP data, scale, and origin, and welding info.

        @deprecated: use update_mopp_welding instead
        """
        self.update_mopp_welding()

    def update_mopp_welding(self):
        """Update the MOPP data, scale, and origin, and welding info.

        Tries the external Havok mopper first (via pyffi.utils.mopp);
        if that is unavailable or fails, falls back on a simple
        internally generated mopp with no welding info.
        """
        logger = logging.getLogger("pyffi.mopp")
        # check type of shape
        if not isinstance(self.shape, NifFormat.bhkPackedNiTriStripsShape):
            raise ValueError(
                "expected bhkPackedNiTriStripsShape on mopp"
                " but got %s instead" % self.shape.__class__.__name__)
        # first try with pyffi.utils.mopp
        failed = False
        try:
            print(pyffi.utils.mopp.getMopperCredits())
        except (OSError, RuntimeError):
            failed = True
        else:
            # find material indices per triangle
            # (per-vertex materials expanded from the sub shapes, then
            # sampled at each triangle's first vertex)
            material_per_vertex = []
            for subshape in self.shape.get_sub_shapes():
                material_per_vertex += (
                    [subshape.material] * subshape.num_vertices)
            material_per_triangle = [
                material_per_vertex[hktri.triangle.v_1]
                for hktri in self.shape.data.triangles]
            # compute havok info
            try:
                origin, scale, mopp, welding_infos \
                    = pyffi.utils.mopp.getMopperOriginScaleCodeWelding(
                        [vert.as_tuple() for vert in self.shape.data.vertices],
                        [(hktri.triangle.v_1,
                          hktri.triangle.v_2,
                          hktri.triangle.v_3)
                         for hktri in self.shape.data.triangles],
                        material_per_triangle)
            except (OSError, RuntimeError):
                failed = True
            else:
                # must use calculated scale and origin
                self.scale = scale
                self.origin.x = origin[0]
                self.origin.y = origin[1]
                self.origin.z = origin[2]
        # if havok's mopper failed, do a simple mopp
        if failed:
            logger.exception(
                "Havok mopp generator failed, falling back on simple mopp "
                "(but collisions may be flawed in-game!)."
                "If you are using the PyFFI that was shipped with Blender, "
                "and you are on Windows, then you may wish to install the "
                "full version of PyFFI from "
                "http://pyffi.sourceforge.net/ "
                "instead, which includes the (closed source) "
                "Havok mopp generator.")
            self.update_origin_scale()
            mopp = self._makeSimpleMopp()
            # no welding info
            welding_infos = []

        # delete mopp and replace with new data
        self.mopp_data_size = len(mopp)
        self.mopp_data.update_size()
        for i, b in enumerate(mopp):
            self.mopp_data[i] = b

        # update welding information
        for hktri, welding_info in izip(self.shape.data.triangles, welding_infos):
            hktri.welding_info = welding_info

    def _makeSimpleMopp(self):
        """Make a simple mopp."""
        mopp = [] # the mopp 'assembly' script
        self._q = 256*256 / self.scale # quantization factor

        # opcodes
        BOUNDX = 0x26
        BOUNDY = 0x27
        BOUNDZ = 0x28
        TESTX = 0x10
        TESTY = 0x11
        TESTZ = 0x12

        # add first crude bounding box checks
        self._vertsceil = [ self._moppCeil(v) for v in self.shape.data.vertices ]
        self._vertsfloor = [ self._moppFloor(v) for v in self.shape.data.vertices ]
        minx = min([ v[0] for v in self._vertsfloor ])
        miny = min([ v[1] for v in self._vertsfloor ])
        minz = min([ v[2] for v in self._vertsfloor ])
        maxx = max([ v[0] for v in self._vertsceil ])
        maxy = max([ v[1] for v in self._vertsceil ])
        maxz = max([ v[2] for v in self._vertsceil ])
        # quantized coordinates must fit the byte range of the opcodes
        if minx < 0 or miny < 0 or minz < 0: raise ValueError("cannot update mopp tree with invalid origin")
        if maxx > 255 or maxy > 255 or maxz > 255: raise ValueError("cannot update mopp tree with invalid scale")
        mopp.extend([BOUNDZ, minz, maxz])
        mopp.extend([BOUNDY, miny, maxy])
        mopp.extend([BOUNDX, minx, maxx])

        # add tree using subsequent X-Y-Z splits
        # (slow and no noticable difference from other simple tree so deactivated)
        #tris = range(len(self.shape.data.triangles))
        #tree = self.split_triangles(tris, [[minx,maxx],[miny,maxy],[minz,maxz]])
        #mopp += self.mopp_from_tree(tree)

        # add a trivial tree
        # this prevents the player of walking through the model
        # but arrows may still fly through
        numtriangles = len(self.shape.data.triangles)
        i = 0x30
        for t in xrange(numtriangles-1):
            mopp.extend([TESTZ, maxz, 0, 1, i])
            i += 1
            if i == 0x50:
                mopp.extend([0x09, 0x20]) # increment triangle offset
                i = 0x30
        mopp.extend([i])

        return mopp

    def _moppCeil(self, v):
        # quantize a vertex, rounding up (plus 0.1 padding)
        moppx = int((v.x + 0.1 - self.origin.x) / self._q + 0.99999999)
        moppy = int((v.y + 0.1 - self.origin.y) / self._q + 0.99999999)
        moppz = int((v.z + 0.1 - self.origin.z) / self._q + 0.99999999)
        return [moppx, moppy, moppz]

    def _moppFloor(self, v):
        # quantize a vertex, rounding down (minus 0.1 padding)
        moppx = int((v.x - 0.1 - self.origin.x) / self._q)
        moppy = int((v.y - 0.1 - self.origin.y) / self._q)
        moppz = int((v.z - 0.1 - self.origin.z) / self._q)
        return [moppx, moppy, moppz]

    def split_triangles(self, ts, bbox, dir=0):
        """Direction 0=X, 1=Y, 2=Z"""
        btest = [] # for bounding box tests
        test = [] # for branch command
        # check bounding box
        tris = [ t.triangle for t in self.shape.data.triangles ]
        tsverts = [ tris[t].v_1 for t in ts] + [ tris[t].v_2 for t in ts] + [ tris[t].v_3 for t in ts]
        minx = min([self._vertsfloor[v][0] for v in tsverts])
        miny = min([self._vertsfloor[v][1] for v in tsverts])
        minz = min([self._vertsfloor[v][2] for v in tsverts])
        maxx = max([self._vertsceil[v][0] for v in tsverts])
        maxy = max([self._vertsceil[v][1] for v in tsverts])
        maxz = max([self._vertsceil[v][2] for v in tsverts])
        # add bounding box checks if it's reduced in a direction
        if (maxx - minx < bbox[0][1] - bbox[0][0]):
            btest += [ 0x26, minx, maxx ]
            bbox[0][0] = minx
            bbox[0][1] = maxx
        if (maxy - miny < bbox[1][1] - bbox[1][0]):
            btest += [ 0x27, miny, maxy ]
            bbox[1][0] = miny
            bbox[1][1] = maxy
        if (maxz - minz < bbox[2][1] - bbox[2][0]):
            btest += [ 0x28, minz, maxz ]
            bbox[2][0] = minz
            bbox[2][1] = maxz
        # if only one triangle, no further split needed
        if len(ts) == 1:
            if ts[0] < 32:
                return [ btest, [ 0x30 + ts[0] ], [], [] ]
            elif ts[0] < 256:
                return [ btest, [ 0x50, ts[0] ], [], [] ]
            else:
                return [ btest, [ 0x51, ts[0] >> 8, ts[0] & 255 ], [], [] ]
        # sort triangles in required direction
        ts.sort(key = lambda t: max(self._vertsceil[tris[t].v_1][dir], self._vertsceil[tris[t].v_2][dir], self._vertsceil[tris[t].v_3][dir]))
        # split into two
        ts1 = ts[:len(ts)/2]
        ts2 = ts[len(ts)/2:]
        # get maximum coordinate of small group
        ts1verts = [ tris[t].v_1 for t in ts1] + [ tris[t].v_2 for t in ts1] + [ tris[t].v_3 for t in ts1]
        ts2verts = [ tris[t].v_1 for t in ts2] + [ tris[t].v_2 for t in ts2] + [ tris[t].v_3 for t in ts2]
        ts1max = max([self._vertsceil[v][dir] for v in ts1verts])
        # get minimum coordinate of large group
        ts2min = min([self._vertsfloor[v][dir] for v in ts2verts])
        # set up test
        test += [0x10+dir, ts1max, ts2min]
        # set up new bounding boxes for each subtree
        # make copy
        bbox1 = [[bbox[0][0],bbox[0][1]],[bbox[1][0],bbox[1][1]],[bbox[2][0],bbox[2][1]]]
        bbox2 = [[bbox[0][0],bbox[0][1]],[bbox[1][0],bbox[1][1]],[bbox[2][0],bbox[2][1]]]
        # update bound in test direction
        bbox1[dir][1] = ts1max
        bbox2[dir][0] = ts2min
        # return result
        nextdir = dir+1
        if nextdir == 3: nextdir = 0
        return [btest, test, self.split_triangles(ts1, bbox1, nextdir), self.split_triangles(ts2, bbox2, nextdir)]

    def mopp_from_tree(self, tree):
        # leaf node: a triangle opcode (0x30..0x51 range) follows the
        # bound tests directly
        if tree[1][0] in xrange(0x30, 0x52):
            return tree[0] + tree[1]
        mopp = tree[0] + tree[1]
        submopp1 = self.mopp_from_tree(tree[2])
        submopp2 = self.mopp_from_tree(tree[3])
        if len(submopp1) < 256:
            # short first branch: branch opcode stores its length directly
            mopp += [ len(submopp1) ]
            mopp += submopp1
            mopp += submopp2
        else:
            # long first branch: emit the second branch first, reached
            # through an explicit jump opcode
            jump = len(submopp2)
            if jump <= 255:
                mopp += [2, 0x05, jump]
            else:
                mopp += [3, 0x06, jump >> 8, jump & 255]
            mopp += submopp2
            mopp += submopp1
        return mopp

    # ported and extended from NifVis/bhkMoppBvTreeShape.py
    def parse_mopp(self, start = 0, depth = 0, toffset = 0, verbose = False):
        """The mopp data is printed to the debug channel
        while parsed. Returns list of indices into mopp data of the bytes
        processed and a list of triangle indices encountered.

        The verbose argument is ignored (and is deprecated).
        """
        # small helper that accumulates a log line and flushes it to the
        # "pyffi.mopp" logger
        class Message:
            def __init__(self):
                self.logger = logging.getLogger("pyffi.mopp")
                self.msg = ""

            def append(self, *args):
                self.msg += " ".join(str(arg) for arg in args) + " "
                return self

            def debug(self):
                if self.msg:
                    self.logger.debug(self.msg)
                    self.msg = ""

            def error(self):
                self.logger.error(self.msg)
                self.msg = ""

        mopp = self.mopp_data # shortcut notation
        ids = [] # indices of bytes processed
        tris = [] # triangle indices
        i = start # current index
        ret = False # set to True if an opcode signals a triangle index
        while i < self.mopp_data_size and not ret:
            # get opcode and print it
            code = mopp[i]
            msg = Message()
            msg.append("%4i:"%i + " "*depth + '0x%02X ' % code)

            if code == 0x09:
                # increment triangle offset
                toffset += mopp[i+1]
                msg.append(mopp[i+1])
                msg.append('%i [ triangle offset += %i, offset is now %i ]'
                           % (mopp[i+1], mopp[i+1], toffset))
                ids.extend([i,i+1])
                i += 2

            elif code in [ 0x0A ]:
                # increment triangle offset
                toffset += mopp[i+1]*256 + mopp[i+2]
                msg.append(mopp[i+1],mopp[i+2])
                msg.append('[ triangle offset += %i, offset is now %i ]'
                           % (mopp[i+1]*256 + mopp[i+2], toffset))
                ids.extend([i,i+1,i+2])
                i += 3

            elif code in [ 0x0B ]:
                # unsure about first two arguments, but the 3rd and 4th set triangle offset
                toffset = 256*mopp[i+3] + mopp[i+4]
                msg.append(mopp[i+1],mopp[i+2],mopp[i+3],mopp[i+4])
                msg.append('[ triangle offset = %i ]' % toffset)
                ids.extend([i,i+1,i+2,i+3,i+4])
                i += 5

            elif code in xrange(0x30,0x50):
                # triangle compact
                msg.append('[ triangle %i ]'%(code-0x30+toffset))
                ids.append(i)
                tris.append(code-0x30+toffset)
                i += 1
                ret = True

            elif code == 0x50:
                # triangle byte
                msg.append(mopp[i+1])
                msg.append('[ triangle %i ]'%(mopp[i+1]+toffset))
                ids.extend([i,i+1])
                tris.append(mopp[i+1]+toffset)
                i += 2
                ret = True

            elif code in [ 0x51 ]:
                # triangle short
                t = mopp[i+1]*256 + mopp[i+2] + toffset
                msg.append(mopp[i+1],mopp[i+2])
                msg.append('[ triangle %i ]' % t)
                ids.extend([i,i+1,i+2])
                tris.append(t)
                i += 3
                ret = True

            elif code in [ 0x53 ]:
                # triangle short?
                t = mopp[i+3]*256 + mopp[i+4] + toffset
                msg.append(mopp[i+1],mopp[i+2],mopp[i+3],mopp[i+4])
                msg.append('[ triangle %i ]' % t)
                ids.extend([i,i+1,i+2,i+3,i+4])
                tris.append(t)
                i += 5
                ret = True

            elif code in [ 0x05 ]:
                # byte jump
                msg.append('[ jump -> %i: ]'%(i+2+mopp[i+1]))
                ids.extend([i,i+1])
                i += 2+mopp[i+1]

            elif code in [ 0x06 ]:
                # short jump
                jump = mopp[i+1]*256 + mopp[i+2]
                msg.append('[ jump -> %i: ]'%(i+3+jump))
                ids.extend([i,i+1,i+2])
                i += 3+jump

            elif code in [0x10,0x11,0x12, 0x13,0x14,0x15, 0x16,0x17,0x18, 0x19, 0x1A, 0x1B, 0x1C]:
                # compact if-then-else with two arguments
                msg.append(mopp[i+1], mopp[i+2])
                if code == 0x10:
                    msg.append('[ branch X')
                elif code == 0x11:
                    msg.append('[ branch Y')
                elif code == 0x12:
                    msg.append('[ branch Z')
                else:
                    msg.append('[ branch ?')
                msg.append('-> %i: %i: ]'%(i+4,i+4+mopp[i+3]))
                msg.debug()
                msg.append(" " + " "*depth + 'if:')
                msg.debug()
                # recurse into both branches, then merge their results
                idssub1, trissub1 = self.parse_mopp(start = i+4, depth = depth+1, toffset = toffset, verbose = verbose)
                msg.append(" " + " "*depth + 'else:')
                msg.debug()
                idssub2, trissub2 = self.parse_mopp(start = i+4+mopp[i+3], depth = depth+1, toffset = toffset, verbose = verbose)
                ids.extend([i,i+1,i+2,i+3])
                ids.extend(idssub1)
                ids.extend(idssub2)
                tris.extend(trissub1)
                tris.extend(trissub2)
                ret = True

            elif code in [0x20,0x21,0x22]:
                # compact if-then-else with one argument
                msg.append(mopp[i+1], '[ branch ? -> %i: %i: ]'%(i+3,i+3+mopp[i+2])).debug()
                msg.append(" " + " "*depth + 'if:').debug()
                idssub1, trissub1 = self.parse_mopp(start = i+3, depth = depth+1, toffset = toffset, verbose = verbose)
                msg.append(" " + " "*depth + 'else:').debug()
                idssub2, trissub2 = self.parse_mopp(start = i+3+mopp[i+2], depth = depth+1, toffset = toffset, verbose = verbose)
                ids.extend([i,i+1,i+2])
                ids.extend(idssub1)
                ids.extend(idssub2)
                tris.extend(trissub1)
                tris.extend(trissub2)
                ret = True

            elif code in [0x23,0x24,0x25]: # short if x <= a then 1; if x > b then 2;
                jump1 = mopp[i+3] * 256 + mopp[i+4]
                jump2 = mopp[i+5] * 256 + mopp[i+6]
                msg.append(mopp[i+1], mopp[i+2], '[ branch ? -> %i: %i: ]'%(i+7+jump1,i+7+jump2)).debug()
                msg.append(" " + " "*depth + 'if:').debug()
                idssub1, trissub1 = self.parse_mopp(start = i+7+jump1, depth = depth+1, toffset = toffset, verbose = verbose)
                msg.append(" " + " "*depth + 'else:').debug()
                idssub2, trissub2 = self.parse_mopp(start = i+7+jump2, depth = depth+1, toffset = toffset, verbose = verbose)
                ids.extend([i,i+1,i+2,i+3,i+4,i+5,i+6])
                ids.extend(idssub1)
                ids.extend(idssub2)
                tris.extend(trissub1)
                tris.extend(trissub2)
                ret = True
            elif code in [0x26,0x27,0x28]:
                # bounding check on a single axis
                msg.append(mopp[i+1], mopp[i+2])
                if code == 0x26:
                    msg.append('[ bound X ]')
                elif code == 0x27:
                    msg.append('[ bound Y ]')
                elif code == 0x28:
                    msg.append('[ bound Z ]')
                ids.extend([i,i+1,i+2])
                i += 3
            elif code in [0x01, 0x02, 0x03, 0x04]:
                msg.append(mopp[i+1], mopp[i+2], mopp[i+3], '[ bound XYZ? ]')
                ids.extend([i,i+1,i+2,i+3])
                i += 4
            else:
                # unrecognized opcode: dump the following bytes to help
                # diagnosis, then bail out
                msg.append("unknown mopp code 0x%02X"%code).error()
                msg.append("following bytes are").debug()
                extrabytes = [mopp[j] for j in xrange(i+1,min(self.mopp_data_size,i+10))]
                extraindex = [j for j in xrange(i+1,min(self.mopp_data_size,i+10))]
                msg.append(extrabytes).debug()
                for b, j in zip(extrabytes, extraindex):
                    if j+b+1 < self.mopp_data_size:
                        msg.append("opcode after jump %i is 0x%02X"%(b,mopp[j+b+1]), [mopp[k] for k in xrange(j+b+2,min(self.mopp_data_size,j+b+11))]).debug()
                raise ValueError("unknown mopp opcode 0x%02X"%code)

            msg.debug()

        return ids, tris
class bhkMultiSphereShape:
    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center of gravity, and inertia tensor.

        :param density: The density of the material.
        :param solid: Whether the spheres are solid (``True``) or
            hollow (``False``).
        :return: A ``(mass, center, inertia)`` tuple where center is a
            3-tuple and inertia a 3x3 tuple of tuples.
        """
        # mass, center, and inertia of each sphere
        subshapes_mci = []
        for sphere in self.spheres:
            mass, inertia = pyffi.utils.inertia.getMassInertiaSphere(
                radius = sphere.radius, density = density, solid = solid)
            subshapes_mci.append((mass, sphere.center.as_tuple(), inertia))
        total_mass = 0
        total_center = (0, 0, 0)
        total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0))
        for mass, center, inertia in subshapes_mci:
            total_mass += mass
            # accumulate the mass-weighted sum of the centers; it is
            # divided by the total mass below (the previous code divided
            # each term by the *running* total mass, which gave a wrong
            # center whenever there was more than one sphere)
            total_center = vecAdd(total_center,
                                  vecscalarMul(center, mass))
            total_inertia = matAdd(total_inertia, inertia)
        if total_mass != 0:
            total_center = vecscalarMul(total_center, 1.0 / total_mass)
        return total_mass, total_center, total_inertia
2873
class bhkNiTriStripsShape:
    def get_interchangeable_packed_shape(self):
        """Returns a bhkPackedNiTriStripsShape block that is geometrically
        interchangeable.
        """
        # get all vertices, triangles, and calculate normals
        vertices = []
        normals = []
        triangles = []
        for strip in self.strips_data:
            # offset triangle indices into the concatenated vertex list
            # (must be done before extending the vertex list)
            triangles.extend(
                (tri1 + len(vertices),
                 tri2 + len(vertices),
                 tri3 + len(vertices))
                for tri1, tri2, tri3 in strip.get_triangles())
            vertices.extend(
                # scaling factor 1/7 applied in add_shape later
                vert.as_tuple() for vert in strip.vertices)
            # face normal from the cross product of two triangle edges
            normals.extend(
                (strip.vertices[tri2] - strip.vertices[tri1]).crossproduct(
                    strip.vertices[tri3] - strip.vertices[tri1])
                .normalized(ignore_error=True)
                .as_tuple()
                for tri1, tri2, tri3 in strip.get_triangles())
        # create packed shape and add geometry
        packed = NifFormat.bhkPackedNiTriStripsShape()
        packed.add_shape(
            triangles=triangles,
            normals=normals,
            vertices=vertices,
            # default layer 1 (static collision)
            layer=self.data_layers[0].layer if self.data_layers else 1,
            material=self.material)
        # set unknowns
        packed.unknown_floats[2] = 0.1
        packed.unknown_floats[4] = 1.0
        packed.unknown_floats[5] = 1.0
        packed.unknown_floats[6] = 1.0
        packed.unknown_floats[8] = 0.1
        packed.scale = 1.0
        packed.unknown_floats_2[0] = 1.0
        packed.unknown_floats_2[1] = 1.0
        # return result
        return packed

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor.

        :param density: The density of the material.
        :param solid: Whether the strips describe solid geometry.
        :return: A ``(mass, center, inertia)`` tuple.
        """
        # first find mass, center, and inertia of all sub shapes
        subshapes_mci = []
        for data in self.strips_data:
            subshapes_mci.append(
                pyffi.utils.inertia.get_mass_center_inertia_polyhedron(
                    [ vert.as_tuple() for vert in data.vertices ],
                    [ triangle for triangle in data.get_triangles() ],
                    density = density, solid = solid))

        # now calculate the totals; the center is the mass-weighted sum
        # of the sub shape centers divided by the total mass (the
        # previous code divided each term by the running total mass,
        # which gave a wrong center for more than one sub shape)
        total_mass = 0
        total_center = (0, 0, 0)
        total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0))
        for mass, center, inertia in subshapes_mci:
            total_mass += mass
            total_center = vecAdd(total_center,
                                  vecscalarMul(center, mass))
            total_inertia = matAdd(total_inertia, inertia)
        if total_mass != 0:
            total_center = vecscalarMul(total_center, 1.0 / total_mass)
        return total_mass, total_center, total_inertia
2940
class bhkPackedNiTriStripsShape:
    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # delegate to the generic polyhedron inertia algorithm,
        # feeding it the packed vertex and triangle data
        return pyffi.utils.inertia.get_mass_center_inertia_polyhedron(
            [ vert.as_tuple() for vert in self.data.vertices ],
            [ ( hktriangle.triangle.v_1,
                hktriangle.triangle.v_2,
                hktriangle.triangle.v_3 )
              for hktriangle in self.data.triangles ],
            density = density, solid = solid)

    def get_sub_shapes(self):
        """Return sub shapes (works for both Oblivion and Fallout 3)."""
        # the sub shapes are stored either on the data block or on the
        # shape itself, depending on the version; prefer the data block
        if self.data and self.data.sub_shapes:
            return self.data.sub_shapes
        else:
            return self.sub_shapes

    def add_shape(self, triangles, normals, vertices, layer = 0, material = 0):
        """Pack the given geometry.

        :param triangles: List of (v1, v2, v3) index triples, relative
            to the ``vertices`` argument.
        :param normals: List of (x, y, z) normals, one per triangle.
        :param vertices: List of (x, y, z) vertex coordinates.
        :param layer: Collision layer of the new sub shape.
        :param material: Material of the new sub shape.
        """
        # add the shape data
        if not self.data:
            self.data = NifFormat.hkPackedNiTriStripsData()
        data = self.data
        # increase number of shapes
        num_shapes = self.num_sub_shapes
        self.num_sub_shapes = num_shapes + 1
        self.sub_shapes.update_size()
        data.num_sub_shapes = num_shapes + 1
        data.sub_shapes.update_size()
        # add the shape
        # (sub shape info is written both to the shape and to the data
        # block, mirroring the two layouts handled by get_sub_shapes)
        self.sub_shapes[num_shapes].layer = layer
        self.sub_shapes[num_shapes].num_vertices = len(vertices)
        self.sub_shapes[num_shapes].material = material
        data.sub_shapes[num_shapes].layer = layer
        data.sub_shapes[num_shapes].num_vertices = len(vertices)
        data.sub_shapes[num_shapes].material = material
        firsttriangle = data.num_triangles
        firstvertex = data.num_vertices
        data.num_triangles += len(triangles)
        data.triangles.update_size()
        for tdata, t, n in zip(data.triangles[firsttriangle:], triangles, normals):
            # offset the triangle indices into the shared vertex list
            tdata.triangle.v_1 = t[0] + firstvertex
            tdata.triangle.v_2 = t[1] + firstvertex
            tdata.triangle.v_3 = t[2] + firstvertex
            tdata.normal.x = n[0]
            tdata.normal.y = n[1]
            tdata.normal.z = n[2]
        data.num_vertices += len(vertices)
        data.vertices.update_size()
        for vdata, v in zip(data.vertices[firstvertex:], vertices):
            # apply the 1/7 scale factor on the vertices
            vdata.x = v[0] / 7.0
            vdata.y = v[1] / 7.0
            vdata.z = v[2] / 7.0

    def get_vertex_hash_generator(
        self,
        vertexprecision=3, subshape_index=None):
        """Generator which produces a tuple of integers for each
        vertex to ease detection of duplicate/close enough to remove
        vertices. The precision parameter denote number of
        significant digits behind the comma.

        For vertexprecision, 3 seems usually enough (maybe we'll
        have to increase this at some point).

        >>> shape = NifFormat.bhkPackedNiTriStripsShape()
        >>> data = NifFormat.hkPackedNiTriStripsData()
        >>> shape.data = data
        >>> shape.num_sub_shapes = 2
        >>> shape.sub_shapes.update_size()
        >>> data.num_vertices = 3
        >>> shape.sub_shapes[0].num_vertices = 2
        >>> shape.sub_shapes[1].num_vertices = 1
        >>> data.vertices.update_size()
        >>> data.vertices[0].x = 0.0
        >>> data.vertices[0].y = 0.1
        >>> data.vertices[0].z = 0.2
        >>> data.vertices[1].x = 1.0
        >>> data.vertices[1].y = 1.1
        >>> data.vertices[1].z = 1.2
        >>> data.vertices[2].x = 2.0
        >>> data.vertices[2].y = 2.1
        >>> data.vertices[2].z = 2.2
        >>> list(shape.get_vertex_hash_generator())
        [(0, (0, 100, 200)), (0, (1000, 1100, 1200)), (1, (2000, 2100, 2200))]
        >>> list(shape.get_vertex_hash_generator(subshape_index=0))
        [(0, 100, 200), (1000, 1100, 1200)]
        >>> list(shape.get_vertex_hash_generator(subshape_index=1))
        [(2000, 2100, 2200)]

        :param vertexprecision: Precision to be used for vertices.
        :type vertexprecision: float
        :param subshape_index: If not ``None``, only yield hashes
            (without the sub shape index) for vertices of the sub shape
            with the given index.
        :return: A generator yielding a hash value for each vertex.
        """
        vertexfactor = 10 ** vertexprecision
        if subshape_index is None:
            # pair each vertex with the index of its sub shape: the
            # chain/repeat expression repeats each sub shape index once
            # per vertex of that sub shape
            for matid, vert in izip(chain(*[repeat(i, sub_shape.num_vertices)
                                            for i, sub_shape
                                            in enumerate(self.get_sub_shapes())]),
                                    self.data.vertices):
                yield (matid, tuple(float_to_int(value * vertexfactor)
                                    for value in vert.as_list()))
        else:
            # skip over the vertices of the preceding sub shapes
            first_vertex = 0
            for i, subshape in izip(xrange(subshape_index),
                                    self.get_sub_shapes()):
                first_vertex += subshape.num_vertices
            # yield rounded coordinates (without sub shape index) for
            # the vertices of the requested sub shape only
            for vert_index in xrange(
                first_vertex,
                first_vertex
                + self.get_sub_shapes()[subshape_index].num_vertices):
                yield tuple(float_to_int(value * vertexfactor)
                            for value
                            in self.data.vertices[vert_index].as_list())

    def get_triangle_hash_generator(self):
        """Generator which produces a tuple of integers, or None
        in degenerate case, for each triangle to ease detection of
        duplicate triangles.

        >>> shape = NifFormat.bhkPackedNiTriStripsShape()
        >>> data = NifFormat.hkPackedNiTriStripsData()
        >>> shape.data = data
        >>> data.num_triangles = 6
        >>> data.triangles.update_size()
        >>> data.triangles[0].triangle.v_1 = 0
        >>> data.triangles[0].triangle.v_2 = 1
        >>> data.triangles[0].triangle.v_3 = 2
        >>> data.triangles[1].triangle.v_1 = 2
        >>> data.triangles[1].triangle.v_2 = 1
        >>> data.triangles[1].triangle.v_3 = 3
        >>> data.triangles[2].triangle.v_1 = 3
        >>> data.triangles[2].triangle.v_2 = 2
        >>> data.triangles[2].triangle.v_3 = 1
        >>> data.triangles[3].triangle.v_1 = 3
        >>> data.triangles[3].triangle.v_2 = 1
        >>> data.triangles[3].triangle.v_3 = 2
        >>> data.triangles[4].triangle.v_1 = 0
        >>> data.triangles[4].triangle.v_2 = 0
        >>> data.triangles[4].triangle.v_3 = 3
        >>> data.triangles[5].triangle.v_1 = 1
        >>> data.triangles[5].triangle.v_2 = 3
        >>> data.triangles[5].triangle.v_3 = 4
        >>> list(shape.get_triangle_hash_generator())
        [(0, 1, 2), (1, 3, 2), (1, 3, 2), (1, 2, 3), None, (1, 3, 4)]

        :return: A generator yielding a hash value for each triangle.
        """
        # rotate each triangle so its smallest vertex index comes
        # first; this makes cyclic permutations hash identically while
        # preserving winding order
        for tri in self.data.triangles:
            v_1, v_2, v_3 = tri.triangle.v_1, tri.triangle.v_2, tri.triangle.v_3
            if v_1 == v_2 or v_2 == v_3 or v_3 == v_1:
                # degenerate
                yield None
            elif v_1 < v_2 and v_1 < v_3:
                # v_1 smallest
                yield v_1, v_2, v_3
            elif v_2 < v_1 and v_2 < v_3:
                # v_2 smallest
                yield v_2, v_3, v_1
            else:
                # v_3 smallest
                yield v_3, v_1, v_2
3104
class bhkRagdollConstraint:
    def apply_scale(self, scale):
        """Scale both ragdoll pivot points by the given factor."""
        for pivot in (self.ragdoll.pivot_a, self.ragdoll.pivot_b):
            pivot.x *= scale
            pivot.y *= scale
            pivot.z *= scale

    def update_a_b(self, parent):
        """Update the B data from the A data."""
        transform = self.get_transform_a_b(parent)
        self.ragdoll.update_a_b(transform)
3119
class bhkRefObject:
    def get_shape_mass_center_inertia(self, density=1, solid=True):
        """Return mass, center of gravity, and inertia tensor of
        this object's shape, if self.shape is not None.

        If self.shape is None, then returns zeros for everything.
        """
        if self.shape:
            # delegate to the shape
            return self.shape.get_mass_center_inertia(
                density=density, solid=solid)
        return 0, (0, 0, 0), ((0, 0, 0), (0, 0, 0), (0, 0, 0))
3135
class bhkRigidBody:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # translation and center of gravity scale linearly
        for vector in (self.translation, self.center):
            vector.x *= scale
            vector.y *= scale
            vector.z *= scale
        # the inertia tensor scales with the square of the factor
        factor = scale ** 2
        for name in ("m_11", "m_12", "m_13", "m_14",
                     "m_21", "m_22", "m_23", "m_24",
                     "m_31", "m_32", "m_33", "m_34"):
            setattr(self.inertia, name, getattr(self.inertia, name) * factor)

    def update_mass_center_inertia(self, density=1, solid=True, mass=None):
        """Look at all the objects under this rigid body and update the mass,
        center of gravity, and inertia tensor accordingly. If the C{mass} parameter
        is given then the C{density} argument is ignored."""
        if mass is not None:
            density = 1

        calc_mass, center, inertia = self.get_shape_mass_center_inertia(
            density=density, solid=solid)

        self.mass = calc_mass
        self.center.x, self.center.y, self.center.z = center
        # copy the 3x3 inertia tensor; the fourth column is zeroed
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                setattr(self.inertia, "m_%i%i" % (row, col),
                        inertia[row - 1][col - 1])
            setattr(self.inertia, "m_%i4" % row, 0)

        if mass is not None:
            # rescale the inertia so it matches the requested mass
            correction = mass / calc_mass if calc_mass != 0 else 1
            self.mass = mass
            for name in ("m_11", "m_12", "m_13", "m_14",
                         "m_21", "m_22", "m_23", "m_24",
                         "m_31", "m_32", "m_33", "m_34"):
                setattr(self.inertia, name,
                        getattr(self.inertia, name) * correction)
3203
class bhkSphereShape:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # the radius is the only dimension of a sphere
        self.radius = self.radius * scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # a sphere is symmetric about its local origin, so the center
        # of mass is simply (0, 0, 0)
        mass, inertia = pyffi.utils.inertia.getMassInertiaSphere(
            self.radius, density = density, solid = solid)
        return mass, (0, 0, 0), inertia
3217
class bhkTransformShape:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # only the translation entries of the transform carry units
        for name in ("m_14", "m_24", "m_34"):
            setattr(self.transform, name,
                    getattr(self.transform, name) * scale)

    def get_mass_center_inertia(self, density=1, solid=True):
        """Return mass, center, and inertia tensor."""
        # mass, center, and inertia of the wrapped shape
        mass, center, inertia = self.get_shape_mass_center_inertia(
            density=density, solid=solid)
        # rotation part and translation part of the transform
        rotation = self.transform.get_matrix_33().as_tuple()
        rotation_transposed = matTransposed(rotation)
        offset = (self.transform.m_14, self.transform.m_24, self.transform.m_34)
        # rotate then translate the center; rotate the inertia tensor
        center = vecAdd(matvecMul(rotation, center), offset)
        inertia = matMul(matMul(rotation_transposed, inertia), rotation)
        return mass, center, inertia
3241
class BSBound:
    def apply_scale(self, scale):
        """Scale the bounding box center and dimensions."""
        for vector in (self.center, self.dimensions):
            vector.x *= scale
            vector.y *= scale
            vector.z *= scale
3251
class BSDismemberSkinInstance:
    def get_dismember_partitions(self):
        """Return triangles and body part indices."""
        all_triangles = []
        part_map = []
        # each partition entry pairs with a skin partition block
        for part, block in zip(self.partitions,
                               self.skin_partition.skin_partition_blocks):
            mapped = list(block.get_mapped_triangles())
            all_triangles.extend(mapped)
            # one body part index per triangle of this partition
            part_map.extend(part.body_part for _ in mapped)
        return all_triangles, part_map
3263 3372
class hkPackedNiTriStripsData:
    def apply_scale(self, scale):
        """Apply scale factor on data."""
        # nothing to do when the scale is numerically one
        if abs(scale - 1.0) < NifFormat.EPSILON:
            return
        for vertex in self.vertices:
            vertex.x, vertex.y, vertex.z = (
                vertex.x * scale, vertex.y * scale, vertex.z * scale)
3382
class InertiaMatrix:
    def as_list(self):
        """Return matrix as 3x3 list."""
        return [list(row) for row in self.as_tuple()]

    def as_tuple(self):
        """Return matrix as 3x3 tuple."""
        return ((self.m_11, self.m_12, self.m_13),
                (self.m_21, self.m_22, self.m_23),
                (self.m_31, self.m_32, self.m_33))

    def __str__(self):
        rows = self.as_tuple()
        return ("[ %6.3f %6.3f %6.3f ]\n"
                "[ %6.3f %6.3f %6.3f ]\n"
                "[ %6.3f %6.3f %6.3f ]\n"
                % (rows[0] + rows[1] + rows[2]))

    def set_identity(self):
        """Set to identity matrix."""
        # note: the fourth (padding) column is zeroed as well
        self.m_11, self.m_12, self.m_13, self.m_14 = 1.0, 0.0, 0.0, 0.0
        self.m_21, self.m_22, self.m_23, self.m_24 = 0.0, 1.0, 0.0, 0.0
        self.m_31, self.m_32, self.m_33, self.m_34 = 0.0, 0.0, 1.0, 0.0

    def is_identity(self):
        """Return ``True`` if the matrix is close to identity."""
        identity = ((1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0))
        return all(
            abs(value - target) <= NifFormat.EPSILON
            for row, id_row in zip(self.as_tuple(), identity)
            for value, target in zip(row, id_row))

    def get_copy(self):
        """Return a copy of the matrix."""
        mat = NifFormat.InertiaMatrix()
        for name in ("m_11", "m_12", "m_13", "m_14",
                     "m_21", "m_22", "m_23", "m_24",
                     "m_31", "m_32", "m_33", "m_34"):
            setattr(mat, name, getattr(self, name))
        return mat

    def __eq__(self, mat):
        if not isinstance(mat, NifFormat.InertiaMatrix):
            raise TypeError(
                "do not know how to compare InertiaMatrix and %s"%mat.__class__)
        # element-wise comparison of the 3x3 part within EPSILON
        return all(
            abs(own - other) <= NifFormat.EPSILON
            for own_row, other_row in zip(self.as_tuple(), mat.as_tuple())
            for own, other in zip(own_row, other_row))

    def __ne__(self, mat):
        return not self.__eq__(mat)
3474
class LimitedHingeDescriptor:
    def update_a_b(self, transform):
        """Update B pivot and axes from A using the given transform."""
        # pivot point: the factor 7 is applied before the transform and
        # divided out afterwards, so only the translation part of the
        # transform is effectively rescaled
        new_pivot = ((7 * self.pivot_a.get_vector_3()) * transform) / 7.0
        self.pivot_b.x = new_pivot.x
        self.pivot_b.y = new_pivot.y
        self.pivot_b.z = new_pivot.z
        # the axes are directions, so only the rotation part applies
        rotation = transform.get_matrix_33()
        for source, target in ((self.axle_a, self.axle_b),
                               (self.perp_2_axle_in_a_2,
                                self.perp_2_axle_in_b_2)):
            rotated = source.get_vector_3() * rotation
            target.x = rotated.x
            target.y = rotated.y
            target.z = rotated.z
3493
class Matrix44:
    def as_list(self):
        """Return matrix as 4x4 list."""
        rows = ((self.m_11, self.m_12, self.m_13, self.m_14),
                (self.m_21, self.m_22, self.m_23, self.m_24),
                (self.m_31, self.m_32, self.m_33, self.m_34),
                (self.m_41, self.m_42, self.m_43, self.m_44))
        return [list(row) for row in rows]
3503
3504 - def as_tuple(self):
3505 """Return matrix as 4x4 tuple.""" 3506 return ( 3507 (self.m_11, self.m_12, self.m_13, self.m_14), 3508 (self.m_21, self.m_22, self.m_23, self.m_24), 3509 (self.m_31, self.m_32, self.m_33, self.m_34), 3510 (self.m_41, self.m_42, self.m_43, self.m_44) 3511 )
3512
3513 - def set_rows(self, row0, row1, row2, row3):
3514 """Set matrix from rows.""" 3515 self.m_11, self.m_12, self.m_13, self.m_14 = row0 3516 self.m_21, self.m_22, self.m_23, self.m_24 = row1 3517 self.m_31, self.m_32, self.m_33, self.m_34 = row2 3518 self.m_41, self.m_42, self.m_43, self.m_44 = row3
3519
3520 - def __str__(self):
3521 return( 3522 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3523 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3524 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3525 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3526 % (self.m_11, self.m_12, self.m_13, self.m_14, 3527 self.m_21, self.m_22, self.m_23, self.m_24, 3528 self.m_31, self.m_32, self.m_33, self.m_34, 3529 self.m_41, self.m_42, self.m_43, self.m_44))
3530
3531 - def set_identity(self):
3532 """Set to identity matrix.""" 3533 self.m_11 = 1.0 3534 self.m_12 = 0.0 3535 self.m_13 = 0.0 3536 self.m_14 = 0.0 3537 self.m_21 = 0.0 3538 self.m_22 = 1.0 3539 self.m_23 = 0.0 3540 self.m_24 = 0.0 3541 self.m_31 = 0.0 3542 self.m_32 = 0.0 3543 self.m_33 = 1.0 3544 self.m_34 = 0.0 3545 self.m_41 = 0.0 3546 self.m_42 = 0.0 3547 self.m_43 = 0.0 3548 self.m_44 = 1.0
3549
3550 - def is_identity(self):
3551 """Return ``True`` if the matrix is close to identity.""" 3552 if (abs(self.m_11 - 1.0) > NifFormat.EPSILON 3553 or abs(self.m_12) > NifFormat.EPSILON 3554 or abs(self.m_13) > NifFormat.EPSILON 3555 or abs(self.m_14) > NifFormat.EPSILON 3556 or abs(self.m_21) > NifFormat.EPSILON 3557 or abs(self.m_22 - 1.0) > NifFormat.EPSILON 3558 or abs(self.m_23) > NifFormat.EPSILON 3559 or abs(self.m_24) > NifFormat.EPSILON 3560 or abs(self.m_31) > NifFormat.EPSILON 3561 or abs(self.m_32) > NifFormat.EPSILON 3562 or abs(self.m_33 - 1.0) > NifFormat.EPSILON 3563 or abs(self.m_34) > NifFormat.EPSILON 3564 or abs(self.m_41) > NifFormat.EPSILON 3565 or abs(self.m_42) > NifFormat.EPSILON 3566 or abs(self.m_43) > NifFormat.EPSILON 3567 or abs(self.m_44 - 1.0) > NifFormat.EPSILON): 3568 return False 3569 else: 3570 return True
3571
3572 - def get_copy(self):
3573 """Create a copy of the matrix.""" 3574 mat = NifFormat.Matrix44() 3575 mat.m_11 = self.m_11 3576 mat.m_12 = self.m_12 3577 mat.m_13 = self.m_13 3578 mat.m_14 = self.m_14 3579 mat.m_21 = self.m_21 3580 mat.m_22 = self.m_22 3581 mat.m_23 = self.m_23 3582 mat.m_24 = self.m_24 3583 mat.m_31 = self.m_31 3584 mat.m_32 = self.m_32 3585 mat.m_33 = self.m_33 3586 mat.m_34 = self.m_34 3587 mat.m_41 = self.m_41 3588 mat.m_42 = self.m_42 3589 mat.m_43 = self.m_43 3590 mat.m_44 = self.m_44 3591 return mat
3592
3593 - def get_matrix_33(self):
3594 """Returns upper left 3x3 part.""" 3595 m = NifFormat.Matrix33() 3596 m.m_11 = self.m_11 3597 m.m_12 = self.m_12 3598 m.m_13 = self.m_13 3599 m.m_21 = self.m_21 3600 m.m_22 = self.m_22 3601 m.m_23 = self.m_23 3602 m.m_31 = self.m_31 3603 m.m_32 = self.m_32 3604 m.m_33 = self.m_33 3605 return m
3606
3607 - def set_matrix_33(self, m):
3608 """Sets upper left 3x3 part.""" 3609 if not isinstance(m, NifFormat.Matrix33): 3610 raise TypeError('argument must be Matrix33') 3611 self.m_11 = m.m_11 3612 self.m_12 = m.m_12 3613 self.m_13 = m.m_13 3614 self.m_21 = m.m_21 3615 self.m_22 = m.m_22 3616 self.m_23 = m.m_23 3617 self.m_31 = m.m_31 3618 self.m_32 = m.m_32 3619 self.m_33 = m.m_33
3620
3621 - def get_translation(self):
3622 """Returns lower left 1x3 part.""" 3623 t = NifFormat.Vector3() 3624 t.x = self.m_41 3625 t.y = self.m_42 3626 t.z = self.m_43 3627 return t
3628
3629 - def set_translation(self, translation):
3630 """Returns lower left 1x3 part.""" 3631 if not isinstance(translation, NifFormat.Vector3): 3632 raise TypeError('argument must be Vector3') 3633 self.m_41 = translation.x 3634 self.m_42 = translation.y 3635 self.m_43 = translation.z
3636
    def is_scale_rotation_translation(self):
        """Return ``True`` if the matrix decomposes into a scaled
        rotation (upper left 3x3 part) plus a translation, i.e. if the
        fourth column is (0, 0, 0, 1) within EPSILON."""
        if not self.get_matrix_33().is_scale_rotation(): return False
        if abs(self.m_14) > NifFormat.EPSILON: return False
        if abs(self.m_24) > NifFormat.EPSILON: return False
        if abs(self.m_34) > NifFormat.EPSILON: return False
        if abs(self.m_44 - 1.0) > NifFormat.EPSILON: return False
        return True
3644
    def get_scale_rotation_translation(self):
        """Decompose the matrix into a (scale, rotation, translation)
        triple: the rotation is the upper left 3x3 part with the scale
        divided out, the translation is the fourth row."""
        rotscl = self.get_matrix_33()
        scale = rotscl.get_scale()
        rot = rotscl / scale
        trans = self.get_translation()
        return (scale, rot, trans)
3651
3652 - def get_scale_quat_translation(self):
3653 rotscl = self.get_matrix_33() 3654 scale, quat = rotscl.get_scale_quat() 3655 trans = self.get_translation() 3656 return (scale, quat, trans)
3657
    def set_scale_rotation_translation(self, scale, rotation, translation):
        """Compose the matrix from a uniform scale factor, a 3x3
        rotation matrix, and a translation vector.

        :param scale: Uniform scale factor (number).
        :param rotation: A :class:`NifFormat.Matrix33` rotation.
        :param translation: A :class:`NifFormat.Vector3` translation.
        :raise TypeError: If an argument has the wrong type.
        """
        if not isinstance(scale, (float, int, long)):
            raise TypeError('scale must be float')
        if not isinstance(rotation, NifFormat.Matrix33):
            raise TypeError('rotation must be Matrix33')
        if not isinstance(translation, NifFormat.Vector3):
            raise TypeError('translation must be Vector3')

        # warn (but proceed) when the rotation part is not orthogonal;
        # the error reported is the deviation of R * R^T from identity
        if not rotation.is_rotation():
            logger = logging.getLogger("pyffi.nif.matrix")
            mat = rotation * rotation.get_transpose()
            idmat = NifFormat.Matrix33()
            idmat.set_identity()
            error = (mat - idmat).sup_norm()
            logger.warning("improper rotation matrix (error is %f)" % error)
            logger.debug("  matrix =")
            for line in str(rotation).split("\n"):
                logger.debug("    %s" % line)
            logger.debug("  its determinant = %f" % rotation.get_determinant())
            logger.debug("  matrix * matrix^T =")
            for line in str(mat).split("\n"):
                logger.debug("    %s" % line)

        # fourth column is fixed to (0, 0, 0, 1)
        self.m_14 = 0.0
        self.m_24 = 0.0
        self.m_34 = 0.0
        self.m_44 = 1.0

        self.set_matrix_33(rotation * scale)
        self.set_translation(translation)
3688
    def get_inverse(self, fast=True):
        """Calculates inverse (fast assumes is_scale_rotation_translation is True)."""
        def adjoint(m, ii, jj):
            # submatrix of m with row ii and column jj removed
            # (i.e. the minor submatrix, despite the name)
            result = []
            for i, row in enumerate(m):
                if i == ii: continue
                result.append([])
                for j, x in enumerate(row):
                    if j == jj: continue
                    result[-1].append(x)
            return result
        def determinant(m):
            # recursive cofactor expansion along the first column
            if len(m) == 2:
                return m[0][0]*m[1][1] - m[1][0]*m[0][1]
            result = 0.0
            for i in xrange(len(m)):
                det = determinant(adjoint(m, i, 0))
                if i & 1:
                    result -= m[i][0] * det
                else:
                    result += m[i][0] * det
            return result

        if fast:
            # invert the 3x3 part and transform the translation by it
            m = self.get_matrix_33().get_inverse()
            t = -(self.get_translation() * m)

            n = NifFormat.Matrix44()
            n.m_14 = 0.0
            n.m_24 = 0.0
            n.m_34 = 0.0
            n.m_44 = 1.0
            n.set_matrix_33(m)
            n.set_translation(t)
            return n
        else:
            # full inverse via the adjugate: nn[j][i] (note the
            # transposition) is the signed cofactor of m[i][j] over det
            m = self.as_list()
            nn = [[0.0 for i in xrange(4)] for j in xrange(4)]
            det = determinant(m)
            if abs(det) < NifFormat.EPSILON:
                raise ZeroDivisionError('cannot invert matrix:\n%s'%self)
            for i in xrange(4):
                for j in xrange(4):
                    if (i+j) & 1:
                        nn[j][i] = -determinant(adjoint(m, i, j)) / det
                    else:
                        nn[j][i] = determinant(adjoint(m, i, j)) / det
            n = NifFormat.Matrix44()
            n.set_rows(*nn)
            return n
    def __mul__(self, x):
        """Multiply with a scalar or with another Matrix44.

        :raise TypeError: For matrix*vector (vectors must be multiplied
            from the left) and for any other unsupported operand.
        """
        if isinstance(x, (float, int, long)):
            # scalar multiplication, element-wise
            m = NifFormat.Matrix44()
            m.m_11 = self.m_11 * x
            m.m_12 = self.m_12 * x
            m.m_13 = self.m_13 * x
            m.m_14 = self.m_14 * x
            m.m_21 = self.m_21 * x
            m.m_22 = self.m_22 * x
            m.m_23 = self.m_23 * x
            m.m_24 = self.m_24 * x
            m.m_31 = self.m_31 * x
            m.m_32 = self.m_32 * x
            m.m_33 = self.m_33 * x
            m.m_34 = self.m_34 * x
            m.m_41 = self.m_41 * x
            m.m_42 = self.m_42 * x
            m.m_43 = self.m_43 * x
            m.m_44 = self.m_44 * x
            return m
        elif isinstance(x, NifFormat.Vector3):
            raise TypeError("matrix*vector not supported; please use left multiplication (vector*matrix)")
        elif isinstance(x, NifFormat.Vector4):
            raise TypeError("matrix*vector not supported; please use left multiplication (vector*matrix)")
        elif isinstance(x, NifFormat.Matrix44):
            # matrix product: m[i][j] = sum over k of self[i][k] * x[k][j]
            m = NifFormat.Matrix44()
            m.m_11 = self.m_11 * x.m_11 + self.m_12 * x.m_21 + self.m_13 * x.m_31 + self.m_14 * x.m_41
            m.m_12 = self.m_11 * x.m_12 + self.m_12 * x.m_22 + self.m_13 * x.m_32 + self.m_14 * x.m_42
            m.m_13 = self.m_11 * x.m_13 + self.m_12 * x.m_23 + self.m_13 * x.m_33 + self.m_14 * x.m_43
            m.m_14 = self.m_11 * x.m_14 + self.m_12 * x.m_24 + self.m_13 * x.m_34 + self.m_14 * x.m_44
            m.m_21 = self.m_21 * x.m_11 + self.m_22 * x.m_21 + self.m_23 * x.m_31 + self.m_24 * x.m_41
            m.m_22 = self.m_21 * x.m_12 + self.m_22 * x.m_22 + self.m_23 * x.m_32 + self.m_24 * x.m_42
            m.m_23 = self.m_21 * x.m_13 + self.m_22 * x.m_23 + self.m_23 * x.m_33 + self.m_24 * x.m_43
            m.m_24 = self.m_21 * x.m_14 + self.m_22 * x.m_24 + self.m_23 * x.m_34 + self.m_24 * x.m_44
            m.m_31 = self.m_31 * x.m_11 + self.m_32 * x.m_21 + self.m_33 * x.m_31 + self.m_34 * x.m_41
            m.m_32 = self.m_31 * x.m_12 + self.m_32 * x.m_22 + self.m_33 * x.m_32 + self.m_34 * x.m_42
            m.m_33 = self.m_31 * x.m_13 + self.m_32 * x.m_23 + self.m_33 * x.m_33 + self.m_34 * x.m_43
            m.m_34 = self.m_31 * x.m_14 + self.m_32 * x.m_24 + self.m_33 * x.m_34 + self.m_34 * x.m_44
            m.m_41 = self.m_41 * x.m_11 + self.m_42 * x.m_21 + self.m_43 * x.m_31 + self.m_44 * x.m_41
            m.m_42 = self.m_41 * x.m_12 + self.m_42 * x.m_22 + self.m_43 * x.m_32 + self.m_44 * x.m_42
            m.m_43 = self.m_41 * x.m_13 + self.m_42 * x.m_23 + self.m_43 * x.m_33 + self.m_44 * x.m_43
            m.m_44 = self.m_41 * x.m_14 + self.m_42 * x.m_24 + self.m_43 * x.m_34 + self.m_44 * x.m_44
            return m
        else:
            raise TypeError("do not know how to multiply Matrix44 with %s"%x.__class__)
3785
    def __div__(self, x):
        """Divide the matrix element-wise by a scalar.

        :raise TypeError: If the divisor is not a number.
        """
        if isinstance(x, (float, int, long)):
            m = NifFormat.Matrix44()
            m.m_11 = self.m_11 / x
            m.m_12 = self.m_12 / x
            m.m_13 = self.m_13 / x
            m.m_14 = self.m_14 / x
            m.m_21 = self.m_21 / x
            m.m_22 = self.m_22 / x
            m.m_23 = self.m_23 / x
            m.m_24 = self.m_24 / x
            m.m_31 = self.m_31 / x
            m.m_32 = self.m_32 / x
            m.m_33 = self.m_33 / x
            m.m_34 = self.m_34 / x
            m.m_41 = self.m_41 / x
            m.m_42 = self.m_42 / x
            m.m_43 = self.m_43 / x
            m.m_44 = self.m_44 / x
            return m
        else:
            raise TypeError("do not know how to divide Matrix44 by %s"%x.__class__)

    # py3k: the / operator calls __truediv__; behavior is identical
    __truediv__ = __div__
3812 - def __rmul__(self, x):
3813 if isinstance(x, (float, int, long)): 3814 return self * x 3815 else: 3816 raise TypeError("do not know how to multiply %s with Matrix44"%x.__class__)
3817
3818 - def __eq__(self, m):
3819 if isinstance(m, type(None)): 3820 return False 3821 if not isinstance(m, NifFormat.Matrix44): 3822 raise TypeError("do not know how to compare Matrix44 and %s"%m.__class__) 3823 if abs(self.m_11 - m.m_11) > NifFormat.EPSILON: return False 3824 if abs(self.m_12 - m.m_12) > NifFormat.EPSILON: return False 3825 if abs(self.m_13 - m.m_13) > NifFormat.EPSILON: return False 3826 if abs(self.m_14 - m.m_14) > NifFormat.EPSILON: return False 3827 if abs(self.m_21 - m.m_21) > NifFormat.EPSILON: return False 3828 if abs(self.m_22 - m.m_22) > NifFormat.EPSILON: return False 3829 if abs(self.m_23 - m.m_23) > NifFormat.EPSILON: return False 3830 if abs(self.m_24 - m.m_24) > NifFormat.EPSILON: return False 3831 if abs(self.m_31 - m.m_31) > NifFormat.EPSILON: return False 3832 if abs(self.m_32 - m.m_32) > NifFormat.EPSILON: return False 3833 if abs(self.m_33 - m.m_33) > NifFormat.EPSILON: return False 3834 if abs(self.m_34 - m.m_34) > NifFormat.EPSILON: return False 3835 if abs(self.m_41 - m.m_41) > NifFormat.EPSILON: return False 3836 if abs(self.m_42 - m.m_42) > NifFormat.EPSILON: return False 3837 if abs(self.m_43 - m.m_43) > NifFormat.EPSILON: return False 3838 if abs(self.m_44 - m.m_44) > NifFormat.EPSILON: return False 3839 return True
3840
def __ne__(self, m):
    """Tolerance-based inequality: the exact negation of ``__eq__``
    (and therefore raises the same TypeError on a bad operand)."""
    equal = self.__eq__(m)
    return not equal
3843
def __add__(self, x):
    """Return a new Matrix44 holding the entry-wise sum.

    :param x: Either another Matrix44 (entry-wise addition) or a scalar
        (added to every entry).
    :return: A new ``NifFormat.Matrix44``; ``self`` is not modified.
    :raise TypeError: If ``x`` is neither a Matrix44 nor a scalar.
    """
    names = ("m_11", "m_12", "m_13", "m_14",
             "m_21", "m_22", "m_23", "m_24",
             "m_31", "m_32", "m_33", "m_34",
             "m_41", "m_42", "m_43", "m_44")
    if isinstance(x, (NifFormat.Matrix44)):
        m = NifFormat.Matrix44()
        for name in names:
            setattr(m, name, getattr(self, name) + getattr(x, name))
        return m
    elif isinstance(x, (int, long, float)):
        m = NifFormat.Matrix44()
        for name in names:
            setattr(m, name, getattr(self, name) + x)
        return m
    else:
        raise TypeError("do not know how to add Matrix44 and %s"%x.__class__)
3885
def __sub__(self, x):
    """Return a new Matrix44 holding the entry-wise difference.

    :param x: Either another Matrix44 (entry-wise subtraction) or a
        scalar (subtracted from every entry).
    :return: A new ``NifFormat.Matrix44``; ``self`` is not modified.
    :raise TypeError: If ``x`` is neither a Matrix44 nor a scalar.
    """
    names = ("m_11", "m_12", "m_13", "m_14",
             "m_21", "m_22", "m_23", "m_24",
             "m_31", "m_32", "m_33", "m_34",
             "m_41", "m_42", "m_43", "m_44")
    if isinstance(x, (NifFormat.Matrix44)):
        m = NifFormat.Matrix44()
        for name in names:
            setattr(m, name, getattr(self, name) - getattr(x, name))
        return m
    elif isinstance(x, (int, long, float)):
        m = NifFormat.Matrix44()
        for name in names:
            setattr(m, name, getattr(self, name) - x)
        return m
    else:
        # note: error message spelling fixed ("substract" -> "subtract")
        raise TypeError("do not know how to subtract Matrix44 and %s"
                        % x.__class__)
3928
def sup_norm(self):
    """Calculate supremum norm of matrix (maximum absolute value of all
    entries)."""
    # single flattened pass over all rows and entries
    return max(abs(entry)
               for row in self.as_list()
               for entry in row)
3934
class NiAVObject:
    """
    >>> from pyffi.formats.nif import NifFormat
    >>> node = NifFormat.NiNode()
    >>> prop1 = NifFormat.NiProperty()
    >>> prop1.name = "hello"
    >>> prop2 = NifFormat.NiProperty()
    >>> prop2.name = "world"
    >>> node.get_properties()
    []
    >>> node.set_properties([prop1, prop2])
    >>> [prop.name for prop in node.get_properties()]
    ['hello', 'world']
    >>> [prop.name for prop in node.properties]
    ['hello', 'world']
    >>> node.set_properties([])
    >>> node.get_properties()
    []
    >>> # now set them the other way around
    >>> node.set_properties([prop2, prop1])
    >>> [prop.name for prop in node.get_properties()]
    ['world', 'hello']
    >>> [prop.name for prop in node.properties]
    ['world', 'hello']
    >>> node.remove_property(prop2)
    >>> [prop.name for prop in node.properties]
    ['hello']
    >>> node.add_property(prop2)
    >>> [prop.name for prop in node.properties]
    ['hello', 'world']
    """
    def add_property(self, prop):
        """Add the given property to the end of the property list.

        :param prop: The property block to add.
        :type prop: L{NifFormat.NiProperty}
        """
        num_props = self.num_properties
        self.num_properties = num_props + 1
        # grow the underlying array before writing the new slot
        self.properties.update_size()
        self.properties[num_props] = prop

    def remove_property(self, prop):
        """Remove the given property from the property list.

        :param prop: The property block to remove.
        :type prop: L{NifFormat.NiProperty}
        """
        # rebuild the list without the given block; identity comparison,
        # so an equal-but-distinct property block is not removed
        self.set_properties([otherprop for otherprop in self.get_properties()
                             if otherprop is not prop])

    def get_properties(self):
        """Return a list of the properties of the block.

        :return: The list of properties.
        :rtype: ``list`` of L{NifFormat.NiProperty}
        """
        return list(self.properties)

    def set_properties(self, proplist):
        """Set the list of properties from the given list (destroys existing list).

        :param proplist: The list of property blocks to set.
        :type proplist: ``list`` of L{NifFormat.NiProperty}
        """
        self.num_properties = len(proplist)
        self.properties.update_size()
        for i, prop in enumerate(proplist):
            self.properties[i] = prop

    def get_transform(self, relative_to=None):
        """Return scale, rotation, and translation into a single 4x4
        matrix, relative to the C{relative_to} block (which should be
        another NiAVObject connecting to this block). If C{relative_to} is
        ``None``, then returns the transform stored in C{self}, or
        equivalently, the target is assumed to be the parent.

        :param relative_to: The block relative to which the transform must
            be calculated. If ``None``, the local transform is returned.
        :raise ValueError: If no chain of NiAVObject blocks connects
            C{relative_to} to C{self}.
        """
        m = NifFormat.Matrix44()
        m.set_scale_rotation_translation(self.scale, self.rotation, self.translation)
        if not relative_to: return m
        # find chain from relative_to to self
        chain = relative_to.find_chain(self, block_type = NifFormat.NiAVObject)
        if not chain:
            raise ValueError(
                'cannot find a chain of NiAVObject blocks '
                'between %s and %s.' % (self.name, relative_to.name))
        # and multiply with all transform matrices (not including relative_to)
        for block in reversed(chain[1:-1]):
            m *= block.get_transform()
        return m

    def set_transform(self, m):
        """Set rotation, translation, and scale, from a 4x4 matrix.

        :param m: The matrix to which the transform should be set."""
        scale, rotation, translation = m.get_scale_rotation_translation()

        self.scale = scale

        # copy component-wise so the existing sub-blocks are updated in
        # place rather than replaced
        self.rotation.m_11 = rotation.m_11
        self.rotation.m_12 = rotation.m_12
        self.rotation.m_13 = rotation.m_13
        self.rotation.m_21 = rotation.m_21
        self.rotation.m_22 = rotation.m_22
        self.rotation.m_23 = rotation.m_23
        self.rotation.m_31 = rotation.m_31
        self.rotation.m_32 = rotation.m_32
        self.rotation.m_33 = rotation.m_33

        self.translation.x = translation.x
        self.translation.y = translation.y
        self.translation.z = translation.z

    def apply_scale(self, scale):
        """Apply scale factor on data.

        :param scale: The scale factor."""
        # apply scale on translation
        self.translation.x *= scale
        self.translation.y *= scale
        self.translation.z *= scale
        # apply scale on bounding box
        self.bounding_box.translation.x *= scale
        self.bounding_box.translation.y *= scale
        self.bounding_box.translation.z *= scale
        self.bounding_box.radius.x *= scale
        self.bounding_box.radius.y *= scale
        self.bounding_box.radius.z *= scale
4066
class NiBSplineCompTransformInterpolator:
    def get_translations(self):
        """Return an iterator over all translation keys."""
        return self._getCompKeys(
            self.translation_offset, 3,
            self.translation_bias, self.translation_multiplier)

    def get_rotations(self):
        """Return an iterator over all rotation keys."""
        return self._getCompKeys(
            self.rotation_offset, 4,
            self.rotation_bias, self.rotation_multiplier)

    def get_scales(self):
        """Return an iterator over all scale keys."""
        keys = self._getCompKeys(self.scale_offset, 1,
                                 self.scale_bias, self.scale_multiplier)
        for scale_key in keys:
            # each key is a 1-tuple; yield the bare scale value
            yield scale_key[0]

    def apply_scale(self, scale):
        """Apply scale factor on data."""
        # the stored translation, as well as the decompression parameters
        # for the translation keys, must all be rescaled
        for axis in ("x", "y", "z"):
            setattr(self.translation, axis,
                    getattr(self.translation, axis) * scale)
        self.translation_bias *= scale
        self.translation_multiplier *= scale
4091
class NiBSplineData:
    """
    >>> # a doctest
    >>> from pyffi.formats.nif import NifFormat
    >>> block = NifFormat.NiBSplineData()
    >>> block.num_short_control_points = 50
    >>> block.short_control_points.update_size()
    >>> for i in range(block.num_short_control_points):
    ...     block.short_control_points[i] = 20 - i
    >>> list(block.get_short_data(12, 4, 3))
    [(8, 7, 6), (5, 4, 3), (2, 1, 0), (-1, -2, -3)]
    >>> offset = block.append_short_data([(1,2),(4,3),(13,14),(8,2),(33,33)])
    >>> offset
    50
    >>> list(block.get_short_data(offset, 5, 2))
    [(1, 2), (4, 3), (13, 14), (8, 2), (33, 33)]
    >>> list(block.get_comp_data(offset, 5, 2, 10.0, 32767.0))
    [(11.0, 12.0), (14.0, 13.0), (23.0, 24.0), (18.0, 12.0), (43.0, 43.0)]
    >>> block.append_float_data([(1.0,2.0),(3.0,4.0),(0.5,0.25)])
    0
    >>> list(block.get_float_data(0, 3, 2))
    [(1.0, 2.0), (3.0, 4.0), (0.5, 0.25)]
    >>> block.append_comp_data([(1,2),(4,3)])
    (60, 2.5, 1.5)
    >>> list(block.get_short_data(60, 2, 2))
    [(-32767, -10922), (32767, 10922)]
    >>> list(block.get_comp_data(60, 2, 2, 2.5, 1.5)) # doctest: +ELLIPSIS
    [(1.0, 2.00...), (4.0, 2.99...)]
    """
    def _getData(self, offset, num_elements, element_size, controlpoints):
        """Helper function for get_float_data and get_short_data. For internal
        use only.

        Yields C{num_elements} tuples of C{element_size} control points
        each, starting at C{offset}.
        """
        # check arguments: controlpoints must be one of our own arrays
        if not (controlpoints is self.float_control_points
                or controlpoints is self.short_control_points):
            # error message fixed: this is the getter, not the appender
            raise ValueError("internal error while getting data")
        # parse the data
        for element in xrange(num_elements):
            yield tuple(
                controlpoints[offset + element * element_size + index]
                for index in xrange(element_size))

    def _appendData(self, data, controlpoints):
        """Helper function for append_float_data and append_short_data. For internal
        use only.

        :return: The offset at which the data was appended, or ``None``
            when C{data} is empty.
        """
        # get number of elements
        num_elements = len(data)
        # empty list, do nothing
        if num_elements == 0:
            return
        # get element size
        element_size = len(data[0])
        # store offset at which we append the data
        if controlpoints is self.float_control_points:
            offset = self.num_float_control_points
            self.num_float_control_points += num_elements * element_size
        elif controlpoints is self.short_control_points:
            offset = self.num_short_control_points
            self.num_short_control_points += num_elements * element_size
        else:
            raise ValueError("internal error while appending data")
        # update size
        controlpoints.update_size()
        # store the data
        for element, datum in enumerate(data):
            for index, value in enumerate(datum):
                controlpoints[offset + element * element_size + index] = value
        # return the offset
        return offset

    def get_short_data(self, offset, num_elements, element_size):
        """Get an iterator to the data.

        :param offset: The offset in the data where to start.
        :param num_elements: Number of elements to get.
        :param element_size: Size of a single element.
        :return: A list of C{num_elements} tuples of size C{element_size}.
        """
        return self._getData(
            offset, num_elements, element_size, self.short_control_points)

    def get_comp_data(self, offset, num_elements, element_size, bias, multiplier):
        """Get an iterator to the data, converted to float with extra bias and
        multiplication factor. If C{x} is the short value, then the returned value
        is C{bias + x * multiplier / 32767.0}.

        :param offset: The offset in the data where to start.
        :param num_elements: Number of elements to get.
        :param element_size: Size of a single element.
        :param bias: Value bias.
        :param multiplier: Value multiplier.
        :return: A list of C{num_elements} tuples of size C{element_size}.
        """
        for key in self.get_short_data(offset, num_elements, element_size):
            yield tuple(bias + x * multiplier / 32767.0 for x in key)

    def append_short_data(self, data):
        """Append data.

        :param data: A list of elements, where each element is a tuple of
            integers. (Note: cannot be an iterator; maybe this restriction
            will be removed in a future version.)
        :return: The offset at which the data was appended."""
        return self._appendData(data, self.short_control_points)

    def append_comp_data(self, data):
        """Append data as compressed list.

        :param data: A list of elements, where each element is a tuple of
            integers. (Note: cannot be an iterator; maybe this restriction
            will be removed in a future version.)
        :return: The offset, bias, and multiplier."""
        # get extremes
        maxvalue = max(max(datum) for datum in data)
        minvalue = min(min(datum) for datum in data)
        # get bias and multiplier
        bias = 0.5 * (maxvalue + minvalue)
        if maxvalue > minvalue:
            multiplier = 0.5 * (maxvalue - minvalue)
        else:
            # no need to compress in this case
            multiplier = 1.0

        # compress points into shorts
        shortdata = []
        for datum in data:
            shortdata.append(tuple(int(32767 * (x - bias) / multiplier)
                                   for x in datum))
        return (self._appendData(shortdata, self.short_control_points),
                bias, multiplier)

    def get_float_data(self, offset, num_elements, element_size):
        """Get an iterator to the data.

        :param offset: The offset in the data where to start.
        :param num_elements: Number of elements to get.
        :param element_size: Size of a single element.
        :return: A list of C{num_elements} tuples of size C{element_size}.
        """
        return self._getData(
            offset, num_elements, element_size, self.float_control_points)

    def append_float_data(self, data):
        """Append data.

        :param data: A list of elements, where each element is a tuple of
            floats. (Note: cannot be an iterator; maybe this restriction
            will be removed in a future version.)
        :return: The offset at which the data was appended."""
        return self._appendData(data, self.float_control_points)
4242
class NiBSplineInterpolator:
    def get_times(self):
        """Return an iterator over all key times.

        @todo: When code for calculating the bsplines is ready, this function
        will return exactly self.basis_data.num_control_points - 1 time points, and
        not self.basis_data.num_control_points as it is now.
        """
        # without basis data there are no keys at all
        if not self.basis_data:
            return
        # key times are spread uniformly over [start_time, stop_time]
        num_points = self.basis_data.num_control_points
        duration = self.stop_time - self.start_time
        for index in xrange(num_points):
            yield self.start_time + (index * duration / (num_points - 1))

    def _getFloatKeys(self, offset, element_size):
        """Helper function to get iterator to various keys. Internal use only."""
        # offset 65535 flags "no keys stored for this channel"
        if offset == 65535:
            return
        # both basis data and spline data are required
        if (not self.basis_data) or (not self.spline_data):
            return
        keys = self.spline_data.get_float_data(
            offset, self.basis_data.num_control_points, element_size)
        for key in keys:
            yield key

    def _getCompKeys(self, offset, element_size, bias, multiplier):
        """Helper function to get iterator to various keys. Internal use only."""
        # offset 65535 flags "no keys stored for this channel"
        if offset == 65535:
            return
        # both basis data and spline data are required
        if (not self.basis_data) or (not self.spline_data):
            return
        keys = self.spline_data.get_comp_data(
            offset, self.basis_data.num_control_points,
            element_size, bias, multiplier)
        for key in keys:
            yield key
4290
class NiBSplineTransformInterpolator:
    def get_translations(self):
        """Return an iterator over all translation keys."""
        return self._getFloatKeys(self.translation_offset, 3)

    def get_rotations(self):
        """Return an iterator over all rotation keys."""
        return self._getFloatKeys(self.rotation_offset, 4)

    def get_scales(self):
        """Return an iterator over all scale keys."""
        for scale_key in self._getFloatKeys(self.scale_offset, 1):
            # each key is a 1-tuple; yield the bare scale value
            yield scale_key[0]

    def apply_scale(self, scale):
        """Apply scale factor on data."""
        self.translation.x *= scale
        self.translation.y *= scale
        self.translation.z *= scale
        # also scale translation float keys
        if self.translation_offset != 65535:
            base = self.translation_offset
            count = self.basis_data.num_control_points
            stride = 3
            points = self.spline_data.float_control_points
            for elem in xrange(count):
                for comp in xrange(stride):
                    points[base + elem * stride + comp] *= scale
4319
class NiControllerSequence:
    def add_controlled_block(self):
        """Create new controlled block, and return it.

        >>> seq = NifFormat.NiControllerSequence()
        >>> seq.num_controlled_blocks
        0
        >>> ctrlblock = seq.add_controlled_block()
        >>> seq.num_controlled_blocks
        1
        >>> isinstance(ctrlblock, NifFormat.ControllerLink)
        True
        """
        # grow the array by one and hand back the freshly created slot
        self.num_controlled_blocks += 1
        self.controlled_blocks.update_size()
        return self.controlled_blocks[-1]
4338
class NiGeometryData:
    """
    >>> from pyffi.formats.nif import NifFormat
    >>> geomdata = NifFormat.NiGeometryData()
    >>> geomdata.num_vertices = 3
    >>> geomdata.has_vertices = True
    >>> geomdata.has_normals = True
    >>> geomdata.has_vertex_colors = True
    >>> geomdata.num_uv_sets = 2
    >>> geomdata.vertices.update_size()
    >>> geomdata.normals.update_size()
    >>> geomdata.vertex_colors.update_size()
    >>> geomdata.uv_sets.update_size()
    >>> geomdata.vertices[0].x = 1
    >>> geomdata.vertices[0].y = 2
    >>> geomdata.vertices[0].z = 3
    >>> geomdata.vertices[1].x = 4
    >>> geomdata.vertices[1].y = 5
    >>> geomdata.vertices[1].z = 6
    >>> geomdata.vertices[2].x = 1.200001
    >>> geomdata.vertices[2].y = 3.400001
    >>> geomdata.vertices[2].z = 5.600001
    >>> geomdata.normals[0].x = 0
    >>> geomdata.normals[0].y = 0
    >>> geomdata.normals[0].z = 1
    >>> geomdata.normals[1].x = 0
    >>> geomdata.normals[1].y = 1
    >>> geomdata.normals[1].z = 0
    >>> geomdata.normals[2].x = 1
    >>> geomdata.normals[2].y = 0
    >>> geomdata.normals[2].z = 0
    >>> geomdata.vertex_colors[1].r = 0.310001
    >>> geomdata.vertex_colors[1].g = 0.320001
    >>> geomdata.vertex_colors[1].b = 0.330001
    >>> geomdata.vertex_colors[1].a = 0.340001
    >>> geomdata.uv_sets[0][0].u = 0.990001
    >>> geomdata.uv_sets[0][0].v = 0.980001
    >>> geomdata.uv_sets[0][2].u = 0.970001
    >>> geomdata.uv_sets[0][2].v = 0.960001
    >>> geomdata.uv_sets[1][0].v = 0.910001
    >>> geomdata.uv_sets[1][0].v = 0.920001
    >>> geomdata.uv_sets[1][2].v = 0.930001
    >>> geomdata.uv_sets[1][2].v = 0.940001
    >>> for h in geomdata.get_vertex_hash_generator():
    ...     print(h)
    (1000, 2000, 3000, 0, 0, 1000, 99000, 98000, 0, 92000, 0, 0, 0, 0)
    (4000, 5000, 6000, 0, 1000, 0, 0, 0, 0, 0, 310, 320, 330, 340)
    (1200, 3400, 5600, 1000, 0, 0, 97000, 96000, 0, 94000, 0, 0, 0, 0)
    """
    def update_center_radius(self):
        """Recalculate center and radius of the data.

        The center is the center of the axis-aligned bounding box, and
        the radius is the largest distance from the center to any vertex.
        """
        # in case there are no vertices, set center and radius to zero
        if len(self.vertices) == 0:
            self.center.x = 0.0
            self.center.y = 0.0
            self.center.z = 0.0
            self.radius = 0.0
            return

        # find extreme values in x, y, and z direction
        # (generator expressions: no need to materialize lists)
        lowx = min(v.x for v in self.vertices)
        lowy = min(v.y for v in self.vertices)
        lowz = min(v.z for v in self.vertices)
        highx = max(v.x for v in self.vertices)
        highy = max(v.y for v in self.vertices)
        highz = max(v.z for v in self.vertices)

        # center is in the center of the bounding box
        cx = (lowx + highx) * 0.5
        cy = (lowy + highy) * 0.5
        cz = (lowz + highz) * 0.5
        self.center.x = cx
        self.center.y = cy
        self.center.z = cz

        # radius is the largest distance from the center
        # (track squared distance; take the root only once at the end)
        r2 = 0.0
        for v in self.vertices:
            dx = cx - v.x
            dy = cy - v.y
            dz = cz - v.z
            r2 = max(r2, dx*dx+dy*dy+dz*dz)
        self.radius = r2 ** 0.5

    def apply_scale(self, scale):
        """Apply scale factor on data."""
        # a scale of 1 (within tolerance) would be a no-op; skip the work
        if abs(scale - 1.0) < NifFormat.EPSILON: return
        for v in self.vertices:
            v.x *= scale
            v.y *= scale
            v.z *= scale
        self.center.x *= scale
        self.center.y *= scale
        self.center.z *= scale
        self.radius *= scale

    def get_vertex_hash_generator(
        self,
        vertexprecision=3, normalprecision=3,
        uvprecision=5, vcolprecision=3):
        """Generator which produces a tuple of integers for each
        (vertex, normal, uv, vcol), to ease detection of duplicate
        vertices. The precision parameters denote number of
        significant digits behind the comma.

        Default for uvprecision should really be high because for
        very large models the uv coordinates can be very close
        together.

        For vertexprecision, 3 seems usually enough (maybe we'll
        have to increase this at some point).

        :param vertexprecision: Precision to be used for vertices.
        :type vertexprecision: int
        :param normalprecision: Precision to be used for normals.
        :type normalprecision: int
        :param uvprecision: Precision to be used for uvs.
        :type uvprecision: int
        :param vcolprecision: Precision to be used for vertex colors.
        :type vcolprecision: int
        :return: A generator yielding a hash value for each vertex.
        """
        # per-channel data, or None when the channel is absent
        verts = self.vertices if self.has_vertices else None
        norms = self.normals if self.has_normals else None
        uvsets = self.uv_sets if len(self.uv_sets) else None
        vcols = self.vertex_colors if self.has_vertex_colors else None
        # precision in digits -> integer scaling factor
        vertexfactor = 10 ** vertexprecision
        normalfactor = 10 ** normalprecision
        uvfactor = 10 ** uvprecision
        vcolfactor = 10 ** vcolprecision
        for i in xrange(self.num_vertices):
            h = []
            if verts:
                h.extend([float_to_int(x * vertexfactor)
                          for x in [verts[i].x, verts[i].y, verts[i].z]])
            if norms:
                h.extend([float_to_int(x * normalfactor)
                          for x in [norms[i].x, norms[i].y, norms[i].z]])
            if uvsets:
                for uvset in uvsets:
                    # uvs sometimes have NaN, for example:
                    # oblivion/meshes/architecture/anvil/anvildooruc01.nif
                    h.extend([float_to_int(x * uvfactor)
                              for x in [uvset[i].u, uvset[i].v]])
            if vcols:
                h.extend([float_to_int(x * vcolfactor)
                          for x in [vcols[i].r, vcols[i].g,
                                    vcols[i].b, vcols[i].a]])
            yield tuple(h)
4489
4490 - class NiGeometry:
4491 """ 4492 >>> from pyffi.formats.nif import NifFormat 4493 >>> id44 = NifFormat.Matrix44() 4494 >>> id44.set_identity() 4495 >>> skelroot = NifFormat.NiNode() 4496 >>> skelroot.name = 'skelroot' 4497 >>> skelroot.set_transform(id44) 4498 >>> bone1 = NifFormat.NiNode() 4499 >>> bone1.name = 'bone1' 4500 >>> bone1.set_transform(id44) 4501 >>> bone2 = NifFormat.NiNode() 4502 >>> bone2.name = 'bone2' 4503 >>> bone2.set_transform(id44) 4504 >>> bone21 = NifFormat.NiNode() 4505 >>> bone21.name = 'bone21' 4506 >>> bone21.set_transform(id44) 4507 >>> bone22 = NifFormat.NiNode() 4508 >>> bone22.name = 'bone22' 4509 >>> bone22.set_transform(id44) 4510 >>> bone211 = NifFormat.NiNode() 4511 >>> bone211.name = 'bone211' 4512 >>> bone211.set_transform(id44) 4513 >>> skelroot.add_child(bone1) 4514 >>> bone1.add_child(bone2) 4515 >>> bone2.add_child(bone21) 4516 >>> bone2.add_child(bone22) 4517 >>> bone21.add_child(bone211) 4518 >>> geom = NifFormat.NiTriShape() 4519 >>> geom.name = 'geom' 4520 >>> geom.set_transform(id44) 4521 >>> geomdata = NifFormat.NiTriShapeData() 4522 >>> skininst = NifFormat.NiSkinInstance() 4523 >>> skindata = NifFormat.NiSkinData() 4524 >>> skelroot.add_child(geom) 4525 >>> geom.data = geomdata 4526 >>> geom.skin_instance = skininst 4527 >>> skininst.skeleton_root = skelroot 4528 >>> skininst.data = skindata 4529 >>> skininst.num_bones = 4 4530 >>> skininst.bones.update_size() 4531 >>> skininst.bones[0] = bone1 4532 >>> skininst.bones[1] = bone2 4533 >>> skininst.bones[2] = bone22 4534 >>> skininst.bones[3] = bone211 4535 >>> skindata.num_bones = 4 4536 >>> skindata.bone_list.update_size() 4537 >>> [child.name for child in skelroot.children] 4538 ['bone1', 'geom'] 4539 >>> skindata.set_transform(id44) 4540 >>> for bonedata in skindata.bone_list: 4541 ... 
bonedata.set_transform(id44) 4542 >>> affectedbones = geom.flatten_skin() 4543 >>> [bone.name for bone in affectedbones] 4544 ['bone1', 'bone2', 'bone22', 'bone211'] 4545 >>> [child.name for child in skelroot.children] 4546 ['geom', 'bone1', 'bone21', 'bone2', 'bone22', 'bone211'] 4547 """
4548 - def is_skin(self):
4549 """Returns True if geometry is skinned.""" 4550 return self.skin_instance != None
4551
4552 - def _validate_skin(self):
4553 """Check that skinning blocks are valid. Will raise NifError exception 4554 if not.""" 4555 if self.skin_instance == None: return 4556 if self.skin_instance.data == None: 4557 raise NifFormat.NifError('NiGeometry has NiSkinInstance without NiSkinData') 4558 if self.skin_instance.skeleton_root == None: 4559 raise NifFormat.NifError('NiGeometry has NiSkinInstance without skeleton root') 4560 if self.skin_instance.num_bones != self.skin_instance.data.num_bones: 4561 raise NifFormat.NifError('NiSkinInstance and NiSkinData have different number of bones')
4562
def add_bone(self, bone, vert_weights):
    """Add bone with given vertex weights.
    After adding all bones, the geometry skinning information should be set
    from the current position of the bones using the L{update_bind_position} function.

    :param bone: The bone NiNode block.
    :param vert_weights: A dictionary mapping each influenced vertex index to a vertex weight."""
    self._validate_skin()
    skininst = self.skin_instance
    skindata = skininst.data
    # (unused local skelroot removed)

    # grow the bone arrays on both the instance and the data block
    bone_index = skininst.num_bones
    skininst.num_bones = bone_index+1
    skininst.bones.update_size()
    skininst.bones[bone_index] = bone
    skindata.num_bones = bone_index+1
    skindata.bone_list.update_size()
    skinbonedata = skindata.bone_list[bone_index]
    # set vertex weights
    skinbonedata.num_vertices = len(vert_weights)
    skinbonedata.vertex_weights.update_size()
    for i, (vert_index, vert_weight) in enumerate(vert_weights.iteritems()):
        skinbonedata.vertex_weights[i].index = vert_index
        skinbonedata.vertex_weights[i].weight = vert_weight
4588 4589 4590
def get_vertex_weights(self):
    """Get vertex weights in a convenient format: list bone and weight per
    vertex."""
    # geometry must be skinned for weights to exist
    if not self.skin_instance:
        raise NifFormat.NifError('Cannot get vertex weights of geometry without skin.')
    self._validate_skin()
    geomdata = self.data
    skininst = self.skin_instance
    skindata = skininst.data
    # XXX todo: should we use list of dictionaries for this
    # where each dict maps bone number to the weight?
    # one (bone, weight) pair list per vertex
    weights = [[] for _vert in xrange(geomdata.num_vertices)]
    for bone_index, bonedata in enumerate(skindata.bone_list):
        for skinweight in bonedata.vertex_weights:
            # skip zero weights
            if skinweight.weight == 0:
                continue
            # pairs recorded so far for this vertex
            vertex_pairs = weights[skinweight.index]
            # if this bone already influences the vertex, accumulate;
            # the for-else adds a fresh pair only when no match was found
            for pos, (known_bone, _known_weight) in enumerate(vertex_pairs):
                if known_bone == bone_index:
                    vertex_pairs[pos][1] += skinweight.weight
                    break
            else:
                vertex_pairs.append([bone_index, skinweight.weight])
    return weights
4621 4622
def flatten_skin(self):
    """Reposition all bone blocks and geometry block in the tree to be direct
    children of the skeleton root.

    Returns list of all used bones by the skin."""

    if not self.is_skin(): return [] # nothing to do

    result = [] # list of repositioned bones
    self._validate_skin() # validate the skin
    skininst = self.skin_instance
    skindata = skininst.data
    skelroot = skininst.skeleton_root

    # reparent geometry: first bake the world transform (relative to the
    # skeleton root) into the geometry, then move it in the tree
    self.set_transform(self.get_transform(skelroot))
    # [-2] of the chain is the direct parent of the geometry
    geometry_parent = skelroot.find_chain(self, block_type = NifFormat.NiAVObject)[-2]
    geometry_parent.remove_child(self) # detach geometry from tree
    skelroot.add_child(self, front = True) # and attach it to the skeleton root

    # reparent all the bone blocks
    for bone_block in skininst.bones:
        # skeleton root, if it is used as bone, does not need to be processed
        if bone_block == skelroot: continue
        # get bone parent
        bone_parent = skelroot.find_chain(bone_block, block_type = NifFormat.NiAVObject)[-2]
        # set new child transforms
        # NOTE: this loop must complete BEFORE any reparenting below —
        # get_transform walks the current tree, so moving children first
        # would compute the wrong transforms
        for child in bone_block.children:
            child.set_transform(child.get_transform(bone_parent))
        # reparent children
        for child in bone_block.children:
            bone_parent.add_child(child)
        bone_block.num_children = 0
        bone_block.children.update_size() # = remove_child on each child
        # set new bone transform (again: computed before the bone itself
        # is moved in the tree)
        bone_block.set_transform(bone_block.get_transform(skelroot))
        # reparent bone block
        bone_parent.remove_child(bone_block)
        skelroot.add_child(bone_block)
        result.append(bone_block)

    return result
4665 4666 4667 4668 # The nif skinning algorithm works as follows (as of nifskope): 4669 # v' # vertex after skinning in geometry space 4670 # = sum over {b in skininst.bones} # sum over all bones b that influence the mesh 4671 # weight[v][b] # how much bone b influences vertex v 4672 # * v # vertex before skinning in geometry space (as it is stored in the shape data) 4673 # * skindata.bone_list[b].transform # transform vertex to bone b space in the rest pose 4674 # * b.get_transform(skelroot) # apply animation, by multiplying with all bone matrices in the chain down to the skeleton root; the vertex is now in skeleton root space 4675 # * skindata.transform # transforms vertex from skeleton root space back to geometry space
def get_skin_deformation(self):
    """Returns a list of vertices and normals in their final position after
    skinning, in geometry space.

    :return: A ``(vertices, normals)`` pair of lists of
        ``NifFormat.Vector3``; for unskinned geometry the stored data is
        returned unchanged, and for geometry without data two empty lists.
    """

    if not self.data: return [], []

    if not self.is_skin(): return self.data.vertices, self.data.normals

    self._validate_skin()
    skininst = self.skin_instance
    skindata = skininst.data
    skelroot = skininst.skeleton_root

    # accumulators: weighted sums per vertex
    vertices = [ NifFormat.Vector3() for i in xrange(self.data.num_vertices) ]
    normals = [ NifFormat.Vector3() for i in xrange(self.data.num_vertices) ]
    sumweights = [ 0.0 for i in xrange(self.data.num_vertices) ]
    skin_offset = skindata.get_transform()
    for i, bone_block in enumerate(skininst.bones):
        bonedata = skindata.bone_list[i]
        bone_offset = bonedata.get_transform()
        bone_matrix = bone_block.get_transform(skelroot)
        # rest-pose offset, then animation, then back to geometry space
        transform = bone_offset * bone_matrix * skin_offset
        scale, rotation, translation = transform.get_scale_rotation_translation()
        for skinweight in bonedata.vertex_weights:
            index = skinweight.index
            weight = skinweight.weight
            vertices[index] += weight * (self.data.vertices[index] * transform)
            if self.data.has_normals:
                # normals only rotate; translation/scale do not apply
                normals[index] += weight * (self.data.normals[index] * rotation)
            sumweights[index] += weight

    # sanity check: weights should form a partition of unity per vertex
    for i, s in enumerate(sumweights):
        if abs(s - 1.0) > 0.01:
            # Logger.warn is a deprecated alias; use warning
            logging.getLogger("pyffi.nif.nigeometry").warning(
                "vertex %i has weights not summing to one" % i)

    return vertices, normals
4713 4714 4715 4716 # ported and extended from niflib::NiNode::GoToSkeletonBindPosition() (r2518)
4718 """Send all bones to their bind position. 4719 4720 @deprecated: Use L{NifFormat.NiNode.send_bones_to_bind_position} instead of 4721 this function. 4722 """ 4723 4724 warnings.warn("use NifFormat.NiNode.send_bones_to_bind_position", 4725 DeprecationWarning) 4726 4727 if not self.is_skin(): 4728 return 4729 4730 # validate skin and set up quick links 4731 self._validate_skin() 4732 skininst = self.skin_instance 4733 skindata = skininst.data 4734 skelroot = skininst.skeleton_root 4735 4736 # reposition the bones 4737 for i, parent_bone in enumerate(skininst.bones): 4738 parent_offset = skindata.bone_list[i].get_transform() 4739 # if parent_bone is a child of the skeleton root, then fix its 4740 # transfrom 4741 if parent_bone in skelroot.children: 4742 parent_bone.set_transform(parent_offset.get_inverse() * self.get_transform(skelroot)) 4743 # fix the transform of all its children 4744 for j, child_bone in enumerate(skininst.bones): 4745 if child_bone not in parent_bone.children: continue 4746 child_offset = skindata.bone_list[j].get_transform() 4747 child_matrix = child_offset.get_inverse() * parent_offset 4748 child_bone.set_transform(child_matrix)
4749 4750 4751 4752 # ported from niflib::NiSkinData::ResetOffsets (r2561)
# ported from niflib::NiSkinData::ResetOffsets (r2561)
def update_bind_position(self):
    """Make the bones' current pose the bind position of this geometry.

    Sets the NiSkinData overall transform to the inverse of the geometry
    transform relative to the skeleton root, and each bone's NiSkinData
    transform to the geometry transform relative to the skeleton root
    times the inverse of that bone's transform relative to the skeleton
    root."""
    # nothing to do for unskinned geometry
    if not self.is_skin():
        return

    # validate skin and set up quick links
    self._validate_skin()
    inst = self.skin_instance
    data = inst.data
    root = inst.skeleton_root

    # overall offset: inverse of geometry-to-root transform
    geom_to_root = self.get_transform(root)
    data.set_transform(geom_to_root.get_inverse())

    # per-bone offsets
    for bone_index, bone_node in enumerate(inst.bones):
        bone_offset = geom_to_root * bone_node.get_transform(root).get_inverse()
        data.bone_list[bone_index].set_transform(bone_offset)
4775
def get_skin_partition(self):
    """Return the skin partition block, if any.

    Looks on the skin instance first; if the instance has no partition,
    falls back to the partition stored on the skin data."""
    inst = self.skin_instance
    if not inst:
        return None
    part = inst.skin_partition
    if part:
        return part
    if inst.data:
        return inst.data.skin_partition
    return part
4789
def set_skin_partition(self, skinpart):
    """Set skin partition block on both the skin instance and its data.

    :raise ValueError: If the geometry has no skin instance or no skin
        data."""
    inst = self.skin_instance
    if not inst:
        raise ValueError("Geometry has no skin instance.")

    data = inst.data
    if not data:
        raise ValueError("Geometry has no skin data.")

    # store the partition in both places
    inst.skin_partition = skinpart
    data.skin_partition = skinpart
4802
class NiKeyframeData:
    def apply_scale(self, scale):
        """Apply scale factor on data.

        Scales the value of every translation key; rotations are left
        untouched."""
        for tkey in self.translations.keys:
            vec = tkey.value
            vec.x *= scale
            vec.y *= scale
            vec.z *= scale
            # NOTE: the forward/backward tangents are deliberately not
            # scaled here (matches the original implementation);
            # what to do with TBC?
class NiMaterialColorController:
    def get_target_color(self):
        """Get target color (works for all nif versions).

        Combines the three flag bits at positions 4-6 with the separate
        target_color field, so the value is found regardless of where
        this particular nif stores it."""
        flag_bits = (self.flags >> 4) & 7
        return flag_bits | self.target_color

    def set_target_color(self, target_color):
        """Set target color (works for all nif versions).

        Writes the value both into bits 4-6 of the flags and into the
        target_color field."""
        low_bits = target_color & 7
        self.flags |= low_bits << 4
        self.target_color = target_color
4827
class NiMorphData:
    def apply_scale(self, scale):
        """Apply scale factor on data.

        Scales every vector of every morph target in place."""
        for target in self.morphs:
            for vec in target.vectors:
                vec.x *= scale
                vec.y *= scale
                vec.z *= scale
4836
4837 - class NiNode:
4838 """ 4839 >>> from pyffi.formats.nif import NifFormat 4840 >>> x = NifFormat.NiNode() 4841 >>> y = NifFormat.NiNode() 4842 >>> z = NifFormat.NiNode() 4843 >>> x.num_children =1 4844 >>> x.children.update_size() 4845 >>> y in x.children 4846 False 4847 >>> x.children[0] = y 4848 >>> y in x.children 4849 True 4850 >>> x.add_child(z, front = True) 4851 >>> x.add_child(y) 4852 >>> x.num_children 4853 2 4854 >>> x.children[0] is z 4855 True 4856 >>> x.remove_child(y) 4857 >>> y in x.children 4858 False 4859 >>> x.num_children 4860 1 4861 >>> e = NifFormat.NiSpotLight() 4862 >>> x.add_effect(e) 4863 >>> x.num_effects 4864 1 4865 >>> e in x.effects 4866 True 4867 4868 >>> from pyffi.formats.nif import NifFormat 4869 >>> node = NifFormat.NiNode() 4870 >>> child1 = NifFormat.NiNode() 4871 >>> child1.name = "hello" 4872 >>> child_2 = NifFormat.NiNode() 4873 >>> child_2.name = "world" 4874 >>> node.get_children() 4875 [] 4876 >>> node.set_children([child1, child_2]) 4877 >>> [child.name for child in node.get_children()] 4878 ['hello', 'world'] 4879 >>> [child.name for child in node.children] 4880 ['hello', 'world'] 4881 >>> node.set_children([]) 4882 >>> node.get_children() 4883 [] 4884 >>> # now set them the other way around 4885 >>> node.set_children([child_2, child1]) 4886 >>> [child.name for child in node.get_children()] 4887 ['world', 'hello'] 4888 >>> [child.name for child in node.children] 4889 ['world', 'hello'] 4890 >>> node.remove_child(child_2) 4891 >>> [child.name for child in node.children] 4892 ['hello'] 4893 >>> node.add_child(child_2) 4894 >>> [child.name for child in node.children] 4895 ['hello', 'world'] 4896 4897 >>> from pyffi.formats.nif import NifFormat 4898 >>> node = NifFormat.NiNode() 4899 >>> effect1 = NifFormat.NiSpotLight() 4900 >>> effect1.name = "hello" 4901 >>> effect2 = NifFormat.NiSpotLight() 4902 >>> effect2.name = "world" 4903 >>> node.get_effects() 4904 [] 4905 >>> node.set_effects([effect1, effect2]) 4906 >>> [effect.name for effect in 
node.get_effects()] 4907 ['hello', 'world'] 4908 >>> [effect.name for effect in node.effects] 4909 ['hello', 'world'] 4910 >>> node.set_effects([]) 4911 >>> node.get_effects() 4912 [] 4913 >>> # now set them the other way around 4914 >>> node.set_effects([effect2, effect1]) 4915 >>> [effect.name for effect in node.get_effects()] 4916 ['world', 'hello'] 4917 >>> [effect.name for effect in node.effects] 4918 ['world', 'hello'] 4919 >>> node.remove_effect(effect2) 4920 >>> [effect.name for effect in node.effects] 4921 ['hello'] 4922 >>> node.add_effect(effect2) 4923 >>> [effect.name for effect in node.effects] 4924 ['hello', 'world'] 4925 """
def add_child(self, child, front=False):
    """Add block to child list; no-op if it is already a child.

    :param child: The child to add.
    :type child: L{NifFormat.NiAVObject}
    :keyword front: If True, insert at the front of the list instead of
        appending at the end (the default).
    :type front: ``bool``
    """
    # already present: nothing to do
    if child in self.children:
        return
    # grow the array by one slot
    old_count = self.num_children
    self.num_children = old_count + 1
    self.children.update_size()
    if front:
        # shift existing entries one slot to the right, then insert
        for slot in range(old_count, 0, -1):
            self.children[slot] = self.children[slot - 1]
        self.children[0] = child
    else:
        self.children[old_count] = child
4949
def remove_child(self, child):
    """Remove a block from the child list.

    :param child: The child to remove.
    :type child: L{NifFormat.NiAVObject}
    """
    # keep every child except the one being removed (identity compare)
    kept = [blk for blk in self.get_children() if blk is not child]
    self.set_children(kept)
4958
def get_children(self):
    """Return a list of the children of the block.

    :return: The list of children.
    :rtype: ``list`` of L{NifFormat.NiAVObject}
    """
    return list(self.children)
4966
def set_children(self, childlist):
    """Set the list of children from the given list (destroys existing list).

    :param childlist: The list of child blocks to set.
    :type childlist: ``list`` of L{NifFormat.NiAVObject}
    """
    # resize the array, then copy the blocks in slot by slot
    self.num_children = len(childlist)
    self.children.update_size()
    for slot, blk in enumerate(childlist):
        self.children[slot] = blk
4977
def add_effect(self, effect):
    """Append an effect to the list of effects.

    :param effect: The effect to add.
    :type effect: L{NifFormat.NiDynamicEffect}
    """
    # grow the array by one and place the effect in the new slot
    slot = self.num_effects
    self.num_effects = slot + 1
    self.effects.update_size()
    self.effects[slot] = effect
4988
def remove_effect(self, effect):
    """Remove a block from the effect list.

    :param effect: The effect to remove.
    :type effect: L{NifFormat.NiDynamicEffect}
    """
    # keep every effect except the one being removed (identity compare)
    kept = [blk for blk in self.get_effects() if blk is not effect]
    self.set_effects(kept)
4997
def get_effects(self):
    """Return a list of the effects of the block.

    :return: The list of effects.
    :rtype: ``list`` of L{NifFormat.NiDynamicEffect}
    """
    return list(self.effects)
5005
def set_effects(self, effectlist):
    """Set the list of effects from the given list (destroys existing list).

    :param effectlist: The list of effect blocks to set.
    :type effectlist: ``list`` of L{NifFormat.NiDynamicEffect}
    """
    # resize the array, then copy the blocks in slot by slot
    self.num_effects = len(effectlist)
    self.effects.update_size()
    for slot, blk in enumerate(effectlist):
        self.effects[slot] = blk
5016
def merge_external_skeleton_root(self, skelroot):
    """Attach skinned geometry to self (which will be the new skeleton root of
    the nif at the given skeleton root). Use this function if you move a
    skinned geometry from one nif into a new nif file. The bone links will be
    updated to point to the tree at self, instead of to the external tree.

    :param skelroot: The skeleton root of the external tree; must have the
        same name as self.
    :raise ValueError: If the names differ, if two NiNodes under self share
        a name, or if a skin instance under a moved child points to a
        different skeleton root than ``skelroot``.
    """
    # sanity check
    if self.name != skelroot.name:
        raise ValueError("skeleton root names do not match")

    # get a dictionary mapping bone names to bone blocks
    # (every named NiNode in our own tree counts as a bone candidate)
    bone_dict = {}
    for block in self.tree():
        if isinstance(block, NifFormat.NiNode):
            if block.name:
                if block.name in bone_dict:
                    # names must be unique, otherwise the remapping below
                    # would be ambiguous
                    raise ValueError(
                        "multiple NiNodes with name %s" % block.name)
                bone_dict[block.name] = block

    # add all non-bone children of the skeleton root to self
    for child in skelroot.get_children():
        # skip empty children
        if not child:
            continue
        # skip bones
        if child.name in bone_dict:
            continue
        # not a bone, so add it
        self.add_child(child)
        # fix links to skeleton root and bones
        for externalblock in child.tree():
            if isinstance(externalblock, NifFormat.NiSkinInstance):
                if not(externalblock.skeleton_root is skelroot):
                    raise ValueError(
                        "expected skeleton root %s but got %s"
                        % (skelroot.name, externalblock.skeleton_root.name))
                externalblock.skeleton_root = self
                # remap each external bone reference onto the
                # equally-named node in our own tree
                for i, externalbone in enumerate(externalblock.bones):
                    externalblock.bones[i] = bone_dict[externalbone.name]
5057
def merge_skeleton_roots(self):
    """This function will look for other geometries whose skeleton
    root is a (possibly indirect) child of this node. It will then
    reparent those geometries to this node. For example, it will unify
    the skeleton roots in Morrowind's cliffracer.nif file, or of the
    (official) body skins. This makes it much easier to import
    skeletons in for instance Blender: there will be only one skeleton
    root for each bone, over all geometries.

    The merge fails for those geometries whose global skin data
    transform does not match the inverse geometry transform relative to
    the skeleton root (the maths does not work out in this case!)

    Returns list of all new blocks that have been reparented (and
    added to the skeleton root children list), and a list of blocks
    for which the merge failed.
    """
    logger = logging.getLogger("pyffi.nif.ninode")

    result = [] # list of reparented blocks
    failed = [] # list of blocks that could not be reparented

    id44 = NifFormat.Matrix44()
    id44.set_identity()

    # find the root block (direct parent of skeleton root that connects to the geometry) for each of these geometries
    for geom in self.get_global_iterator():
        # make sure we only do each geometry once
        if (geom in result) or (geom in failed):
            continue
        # only geometries
        if not isinstance(geom, NifFormat.NiGeometry):
            continue
        # only skins
        if not geom.is_skin():
            continue
        # only if they have a different skeleton root
        if geom.skin_instance.skeleton_root is self:
            continue
        # check transforms: the merge only works when the global skin data
        # transform is the inverse of the geometry transform relative to
        # the skeleton root
        if (geom.skin_instance.data.get_transform()
            * geom.get_transform(geom.skin_instance.skeleton_root) != id44):
            # fix: Logger.warn is a deprecated alias of Logger.warning
            logger.warning(
                "can't rebase %s: global skin data transform does not match "
                "geometry transform relative to skeleton root" % geom.name)
            failed.append(geom)
            continue # skip this one
        # everything ok!
        # find geometry parent
        geomroot = geom.skin_instance.skeleton_root.find_chain(geom)[-2]
        # reparent
        logger.debug("detaching %s from %s" % (geom.name, geomroot.name))
        geomroot.remove_child(geom)
        logger.debug("attaching %s to %s" % (geom.name, self.name))
        self.add_child(geom)
        # set its new skeleton root
        geom.skin_instance.skeleton_root = self
        # fix transform so the invariant checked above keeps holding
        # for the new skeleton root
        geom.skin_instance.data.set_transform(
            geom.get_transform(self).get_inverse(fast=False))
        # and signal that we reparented this block
        result.append(geom)

    return result, failed
5122
def get_skinned_geometries(self):
    """Yield every skinned geometry which has self as skeleton root."""
    for block in self.get_global_iterator():
        # only geometry blocks qualify
        if not isinstance(block, NifFormat.NiGeometry):
            continue
        # only skinned ones
        if not block.is_skin():
            continue
        # and only those rooted at this very node
        if block.skin_instance.skeleton_root is self:
            yield block
5132
5134 """Call this on the skeleton root of geometries. This function will 5135 transform the geometries, such that all skin data transforms coincide, or 5136 at least coincide partially. 5137 5138 :return: A number quantifying the remaining difference between bind 5139 positions. 5140 :rtype: ``float`` 5141 """ 5142 # get logger 5143 logger = logging.getLogger("pyffi.nif.ninode") 5144 # maps bone name to bind position transform matrix (relative to 5145 # skeleton root) 5146 bone_bind_transform = {} 5147 # find all skinned geometries with self as skeleton root 5148 geoms = list(self.get_skinned_geometries()) 5149 # sort geometries by bone level 5150 # this ensures that "parent" geometries serve as reference for "child" 5151 # geometries 5152 sorted_geoms = [] 5153 for bone in self.get_global_iterator(): 5154 if not isinstance(bone, NifFormat.NiNode): 5155 continue 5156 for geom in geoms: 5157 if not geom in sorted_geoms: 5158 if bone in geom.skin_instance.bones: 5159 sorted_geoms.append(geom) 5160 geoms = sorted_geoms 5161 # now go over all geometries and synchronize their relative bind poses 5162 for geom in geoms: 5163 skininst = geom.skin_instance 5164 skindata = skininst.data 5165 # set difference matrix to identity 5166 diff = NifFormat.Matrix44() 5167 diff.set_identity() 5168 # go over all bones in current geometry, see if it has been visited 5169 # before 5170 for bonenode, bonedata in izip(skininst.bones, skindata.bone_list): 5171 # bonenode can be None; see pyffi issue #3114079 5172 if not bonenode: 5173 continue 5174 if bonenode.name in bone_bind_transform: 5175 # calculate difference 5176 # (see explanation below) 5177 diff = (bonedata.get_transform() 5178 * bone_bind_transform[bonenode.name] 5179 * geom.get_transform(self).get_inverse(fast=False)) 5180 break 5181 5182 if diff.is_identity(): 5183 logger.debug("%s is already in bind position" % geom.name) 5184 else: 5185 logger.info("fixing %s bind position" % geom.name) 5186 # explanation: 5187 # we must 
set the bonedata transform T' such that its bone bind 5188 # position matrix 5189 # T'^-1 * G 5190 # (where T' = the updated bonedata.get_transform() 5191 # and G = geom.get_transform(self)) 5192 # coincides with the desired matrix 5193 # B = bone_bind_transform[bonenode.name] 5194 # in other words: 5195 # T' = G * B^-1 5196 # or, with diff = D = T * B * G^-1 5197 # T' = D^-1 * T 5198 # to keep the geometry in sync, the vertices and normals must 5199 # be multiplied with D, e.g. v' = v * D 5200 # because the full transform 5201 # v * T * ... = v * D * D^-1 * T * ... = v' * T' * ... 5202 # must be kept invariant 5203 for bonenode, bonedata in izip(skininst.bones, skindata.bone_list): 5204 # bonenode can be None; see pyffi issue #3114079 5205 logger.debug( 5206 "transforming bind position of bone %s" 5207 % bonenode.name if bonenode else "<None>") 5208 bonedata.set_transform(diff.get_inverse(fast=False) 5209 * bonedata.get_transform()) 5210 # transform geometry 5211 logger.debug("transforming vertices and normals") 5212 for vert in geom.data.vertices: 5213 newvert = vert * diff 5214 vert.x = newvert.x 5215 vert.y = newvert.y 5216 vert.z = newvert.z 5217 for norm in geom.data.normals: 5218 newnorm = norm * diff.get_matrix_33() 5219 norm.x = newnorm.x 5220 norm.y = newnorm.y 5221 norm.z = newnorm.z 5222 5223 # store updated bind position for future reference 5224 for bonenode, bonedata in izip(skininst.bones, skindata.bone_list): 5225 # bonenode can be None; see pyffi issue #3114079 5226 if not bonenode: 5227 continue 5228 bone_bind_transform[bonenode.name] = ( 5229 bonedata.get_transform().get_inverse(fast=False) 5230 * geom.get_transform(self)) 5231 5232 # validation: check that bones share bind position 5233 bone_bind_transform = {} 5234 error = 0.0 5235 for geom in geoms: 5236 skininst = geom.skin_instance 5237 skindata = skininst.data 5238 # go over all bones in current geometry, see if it has been visited 5239 # before 5240 for bonenode, bonedata in 
izip(skininst.bones, skindata.bone_list): 5241 if not bonenode: 5242 # bonenode can be None; see pyffi issue #3114079 5243 continue 5244 if bonenode.name in bone_bind_transform: 5245 # calculate difference 5246 diff = ((bonedata.get_transform().get_inverse(fast=False) 5247 * geom.get_transform(self)) 5248 - bone_bind_transform[bonenode.name]) 5249 # calculate error (sup norm) 5250 error = max(error, 5251 max(max(abs(elem) for elem in row) 5252 for row in diff.as_list())) 5253 else: 5254 bone_bind_transform[bonenode.name] = ( 5255 bonedata.get_transform().get_inverse(fast=False) 5256 * geom.get_transform(self)) 5257 5258 logger.debug("Geometry bind position error is %f" % error) 5259 if error > 1e-3: 5260 logger.warning("Failed to send some geometries to bind position") 5261 return error
5262
5264 """Some nifs (in particular in Morrowind) have geometries that are skinned 5265 but that do not share bones. In such cases, send_geometries_to_bind_position 5266 cannot reposition them. This function will send such geometries to the 5267 position of their root node. 5268 5269 Examples of such nifs are the official Morrowind skins (after merging 5270 skeleton roots). 5271 5272 Returns list of detached geometries that have been moved. 5273 """ 5274 logger = logging.getLogger("pyffi.nif.ninode") 5275 geoms = list(self.get_skinned_geometries()) 5276 5277 # parts the geometries into sets that do not share bone influences 5278 # * first construct sets of bones, merge intersecting sets 5279 # * then check which geometries belong to which set 5280 # (note: bone can be None, see issue #3114079) 5281 bonesets = [ 5282 list(set(bone for bone in geom.skin_instance.bones if bone)) 5283 for geom in geoms] 5284 # the merged flag signals that we are still merging bones 5285 merged = True 5286 while merged: 5287 merged = False 5288 for boneset in bonesets: 5289 for other_boneset in bonesets: 5290 # skip if sets are identical 5291 if other_boneset is boneset: 5292 continue 5293 # if not identical, see if they can be merged 5294 if set(other_boneset) & set(boneset): 5295 # XXX hackish but works 5296 # calculate union 5297 updated_boneset = list(set(other_boneset) | set(boneset)) 5298 # and move all bones into one bone set 5299 del other_boneset[:] 5300 del boneset[:] 5301 boneset += updated_boneset 5302 merged = True 5303 # remove empty bone sets 5304 bonesets = list(boneset for boneset in bonesets if boneset) 5305 logger.debug("bones per partition are") 5306 for boneset in bonesets: 5307 logger.debug(str([bone.name for bone in boneset])) 5308 parts = [[geom for geom in geoms 5309 if set(geom.skin_instance.bones) & set(boneset)] 5310 for boneset in bonesets] 5311 logger.debug("geometries per partition are") 5312 for part in parts: 5313 logger.debug(str([geom.name for geom in 
part])) 5314 # if there is only one set, we are done 5315 if len(bonesets) <= 1: 5316 logger.debug("no detached geometries") 5317 return [] 5318 5319 # next, for each part, move all geometries so the lowest bone matches the 5320 # node transform 5321 for boneset, part in izip(bonesets, parts): 5322 logger.debug("moving part %s" % str([geom.name for geom in part])) 5323 # find "lowest" bone in the bone set 5324 lowest_dist = None 5325 lowest_bonenode = None 5326 for bonenode in boneset: 5327 dist = len(self.find_chain(bonenode)) 5328 if (lowest_dist is None) or (lowest_dist > dist): 5329 lowest_dist = dist 5330 lowest_bonenode = bonenode 5331 logger.debug("reference bone is %s" % lowest_bonenode.name) 5332 # find a geometry that has this bone 5333 for geom in part: 5334 for bonenode, bonedata in izip(geom.skin_instance.bones, 5335 geom.skin_instance.data.bone_list): 5336 if bonenode is lowest_bonenode: 5337 lowest_geom = geom 5338 lowest_bonedata = bonedata 5339 break 5340 else: 5341 continue 5342 break 5343 else: 5344 raise RuntimeError("no reference geometry with this bone: bug?") 5345 # calculate matrix 5346 diff = (lowest_bonedata.get_transform() 5347 * lowest_bonenode.get_transform(self) 5348 * lowest_geom.get_transform(self).get_inverse(fast=False)) 5349 if diff.is_identity(): 5350 logger.debug("%s is already in node position" 5351 % lowest_bonenode.name) 5352 continue 5353 # now go over all geometries and synchronize their position to the 5354 # reference bone 5355 for geom in part: 5356 logger.info("moving %s to node position" % geom.name) 5357 # XXX we're using this trick a few times now 5358 # XXX move it to a separate NiGeometry function 5359 skininst = geom.skin_instance 5360 skindata = skininst.data 5361 # explanation: 5362 # we must set the bonedata transform T' such that its bone bind 5363 # position matrix 5364 # T'^-1 * G 5365 # (where T' = the updated lowest_bonedata.get_transform() 5366 # and G = geom.get_transform(self)) 5367 # coincides with the 
desired matrix 5368 # B = lowest_bonenode.get_transform(self) 5369 # in other words: 5370 # T' = G * B^-1 5371 # or, with diff = D = T * B * G^-1 5372 # T' = D^-1 * T 5373 # to keep the geometry in sync, the vertices and normals must 5374 # be multiplied with D, e.g. v' = v * D 5375 # because the full transform 5376 # v * T * ... = v * D * D^-1 * T * ... = v' * T' * ... 5377 # must be kept invariant 5378 for bonenode, bonedata in izip(skininst.bones, skindata.bone_list): 5379 logger.debug("transforming bind position of bone %s" 5380 % bonenode.name) 5381 bonedata.set_transform(diff.get_inverse(fast=False) 5382 * bonedata.get_transform()) 5383 # transform geometry 5384 logger.debug("transforming vertices and normals") 5385 for vert in geom.data.vertices: 5386 newvert = vert * diff 5387 vert.x = newvert.x 5388 vert.y = newvert.y 5389 vert.z = newvert.z 5390 for norm in geom.data.normals: 5391 newnorm = norm * diff.get_matrix_33() 5392 norm.x = newnorm.x 5393 norm.y = newnorm.y 5394 norm.z = newnorm.z
5395
5397 """This function will send all bones of geometries of this skeleton root 5398 to their bind position. For best results, call 5399 L{send_geometries_to_bind_position} first. 5400 5401 :return: A number quantifying the remaining difference between bind 5402 positions. 5403 :rtype: ``float`` 5404 """ 5405 # get logger 5406 logger = logging.getLogger("pyffi.nif.ninode") 5407 # check all bones and bone datas to see if a bind position exists 5408 bonelist = [] 5409 error = 0.0 5410 geoms = list(self.get_skinned_geometries()) 5411 for geom in geoms: 5412 skininst = geom.skin_instance 5413 skindata = skininst.data 5414 for bonenode, bonedata in izip(skininst.bones, skindata.bone_list): 5415 # bonenode can be None; see pyffi issue #3114079 5416 if not bonenode: 5417 continue 5418 # make sure all bone data of shared bones coincides 5419 for othergeom, otherbonenode, otherbonedata in bonelist: 5420 if bonenode is otherbonenode: 5421 diff = ((otherbonedata.get_transform().get_inverse(fast=False) 5422 * 5423 othergeom.get_transform(self)) 5424 - 5425 (bonedata.get_transform().get_inverse(fast=False) 5426 * 5427 geom.get_transform(self))) 5428 if diff.sup_norm() > 1e-3: 5429 logger.warning("Geometries %s and %s do not share the same bind position: bone %s will be sent to a position matching only one of these" % (geom.name, othergeom.name, bonenode.name)) 5430 # break the loop 5431 break 5432 else: 5433 # the loop did not break, so the bone was not yet added 5434 # add it now 5435 logger.debug("Found bind position data for %s" % bonenode.name) 5436 bonelist.append((geom, bonenode, bonedata)) 5437 5438 # the algorithm simply makes all transforms correct by changing 5439 # each local bone matrix in such a way that the global matrix 5440 # relative to the skeleton root matches the skinning information 5441 5442 # this algorithm is numerically most stable if bones are traversed 5443 # in hierarchical order, so first sort the bones 5444 sorted_bonelist = [] 5445 for node in 
self.tree(): 5446 if not isinstance(node, NifFormat.NiNode): 5447 continue 5448 for geom, bonenode, bonedata in bonelist: 5449 if node is bonenode: 5450 sorted_bonelist.append((geom, bonenode, bonedata)) 5451 bonelist = sorted_bonelist 5452 # now reposition the bones 5453 for geom, bonenode, bonedata in bonelist: 5454 # explanation: 5455 # v * CHILD * PARENT * ... 5456 # = v * CHILD * DIFF^-1 * DIFF * PARENT * ... 5457 # and now choose DIFF such that DIFF * PARENT * ... = desired transform 5458 5459 # calculate desired transform relative to skeleton root 5460 # transform is DIFF * PARENT 5461 transform = (bonedata.get_transform().get_inverse(fast=False) 5462 * geom.get_transform(self)) 5463 # calculate difference 5464 diff = transform * bonenode.get_transform(self).get_inverse(fast=False) 5465 if not diff.is_identity(): 5466 logger.info("Sending %s to bind position" 5467 % bonenode.name) 5468 # fix transform of this node 5469 bonenode.set_transform(diff * bonenode.get_transform()) 5470 # fix transform of all its children 5471 diff_inv = diff.get_inverse(fast=False) 5472 for childnode in bonenode.children: 5473 if childnode: 5474 childnode.set_transform(childnode.get_transform() * diff_inv) 5475 else: 5476 logger.debug("%s is already in bind position" 5477 % bonenode.name) 5478 5479 # validate 5480 error = 0.0 5481 diff_error = 0.0 5482 for geom in geoms: 5483 skininst = geom.skin_instance 5484 skindata = skininst.data 5485 # calculate geometry transform 5486 geomtransform = geom.get_transform(self) 5487 # check skin data fields (also see NiGeometry.update_bind_position) 5488 for i, bone in enumerate(skininst.bones): 5489 # bone can be None; see pyffi issue #3114079 5490 if bone is None: 5491 continue 5492 diff = ((skindata.bone_list[i].get_transform().get_inverse(fast=False) 5493 * geomtransform) 5494 - bone.get_transform(self)) 5495 # calculate error (sup norm) 5496 diff_error = max(max(abs(elem) for elem in row) 5497 for row in diff.as_list()) 5498 if diff_error 
> 1e-3: 5499 logger.warning( 5500 "Failed to set bind position of bone %s for geometry %s (error is %f)" 5501 % (bone.name, geom.name, diff_error)) 5502 error = max(error, diff_error) 5503 5504 logger.debug("Bone bind position maximal error is %f" % error) 5505 if error > 1e-3: 5506 logger.warning("Failed to send some bones to bind position") 5507 return error
5508
5509 - class NiObjectNET:
5510 - def add_extra_data(self, extrablock):
5511 """Add block to extra data list and extra data chain. It is good practice 5512 to ensure that the extra data has empty next_extra_data field when adding it 5513 to avoid loops in the hierarchy.""" 5514 # add to the list 5515 num_extra = self.num_extra_data_list 5516 self.num_extra_data_list = num_extra + 1 5517 self.extra_data_list.update_size() 5518 self.extra_data_list[num_extra] = extrablock 5519 # add to the chain 5520 if not self.extra_data: 5521 self.extra_data = extrablock 5522 else: 5523 lastextra = self.extra_data 5524 while lastextra.next_extra_data: 5525 lastextra = lastextra.next_extra_data 5526 lastextra.next_extra_data = extrablock
5527
5528 - def remove_extra_data(self, extrablock):
5529 """Remove block from extra data list and extra data chain. 5530 5531 >>> from pyffi.formats.nif import NifFormat 5532 >>> block = NifFormat.NiNode() 5533 >>> block.num_extra_data_list = 3 5534 >>> block.extra_data_list.update_size() 5535 >>> extrablock = NifFormat.NiStringExtraData() 5536 >>> block.extra_data_list[1] = extrablock 5537 >>> block.remove_extra_data(extrablock) 5538 >>> [extra for extra in block.extra_data_list] 5539 [None, None] 5540 """ 5541 # remove from list 5542 new_extra_list = [] 5543 for extraother in self.extra_data_list: 5544 if not extraother is extrablock: 5545 new_extra_list.append(extraother) 5546 self.num_extra_data_list = len(new_extra_list) 5547 self.extra_data_list.update_size() 5548 for i, extraother in enumerate(new_extra_list): 5549 self.extra_data_list[i] = extraother 5550 # remove from chain 5551 if self.extra_data is extrablock: 5552 self.extra_data = extrablock.next_extra_data 5553 lastextra = self.extra_data 5554 while lastextra: 5555 if lastextra.next_extra_data is extrablock: 5556 lastextra.next_extra_data = lastextra.next_extra_data.next_extra_data 5557 lastextra = lastextra.next_extra_data
5558
5559 - def get_extra_datas(self):
5560 """Get a list of all extra data blocks.""" 5561 xtras = [xtra for xtra in self.extra_data_list] 5562 xtra = self.extra_data 5563 while xtra: 5564 if not xtra in self.extra_data_list: 5565 xtras.append(xtra) 5566 xtra = xtra.next_extra_data 5567 return xtras
5568
5569 - def set_extra_datas(self, extralist):
5570 """Set all extra data blocks from given list (erases existing data). 5571 5572 >>> from pyffi.formats.nif import NifFormat 5573 >>> node = NifFormat.NiNode() 5574 >>> extra1 = NifFormat.NiExtraData() 5575 >>> extra1.name = "hello" 5576 >>> extra2 = NifFormat.NiExtraData() 5577 >>> extra2.name = "world" 5578 >>> node.get_extra_datas() 5579 [] 5580 >>> node.set_extra_datas([extra1, extra2]) 5581 >>> [extra.name for extra in node.get_extra_datas()] 5582 ['hello', 'world'] 5583 >>> [extra.name for extra in node.extra_data_list] 5584 ['hello', 'world'] 5585 >>> node.extra_data is extra1 5586 True 5587 >>> extra1.next_extra_data is extra2 5588 True 5589 >>> extra2.next_extra_data is None 5590 True 5591 >>> node.set_extra_datas([]) 5592 >>> node.get_extra_datas() 5593 [] 5594 >>> # now set them the other way around 5595 >>> node.set_extra_datas([extra2, extra1]) 5596 >>> [extra.name for extra in node.get_extra_datas()] 5597 ['world', 'hello'] 5598 >>> [extra.name for extra in node.extra_data_list] 5599 ['world', 'hello'] 5600 >>> node.extra_data is extra2 5601 True 5602 >>> extra2.next_extra_data is extra1 5603 True 5604 >>> extra1.next_extra_data is None 5605 True 5606 5607 :param extralist: List of extra data blocks to add. 5608 :type extralist: ``list`` of L{NifFormat.NiExtraData} 5609 """ 5610 # set up extra data list 5611 self.num_extra_data_list = len(extralist) 5612 self.extra_data_list.update_size() 5613 for i, extra in enumerate(extralist): 5614 self.extra_data_list[i] = extra 5615 # set up extra data chain 5616 # first, kill the current chain 5617 self.extra_data = None 5618 # now reconstruct it 5619 if extralist: 5620 self.extra_data = extralist[0] 5621 lastextra = self.extra_data 5622 for extra in extralist[1:]: 5623 lastextra.next_extra_data = extra 5624 lastextra = extra 5625 lastextra.next_extra_data = None
5626
def add_controller(self, ctrlblock):
    """Add block to controller chain and set target of controller to self."""
    if self.controller:
        # chain already exists: walk to its tail and append there
        tail = self.controller
        while tail.next_controller:
            tail = tail.next_controller
        tail.next_controller = ctrlblock
    else:
        # empty chain: the new block becomes the head
        self.controller = ctrlblock
    # the new controller animates this object
    ctrlblock.target = self
5638
def get_controllers(self):
    """Get a list of all controllers (walks the chain, head first)."""
    chain = []
    current = self.controller
    # follow next_controller links until the chain terminates
    while current:
        chain.append(current)
        current = current.next_controller
    return chain
5647
def add_integer_extra_data(self, name, value):
    """Add a particular extra integer data block."""
    # build the extra data block, then attach it via the generic helper
    block = NifFormat.NiIntegerExtraData()
    block.name = name
    block.integer_data = value
    self.add_extra_data(block)
5654
class NiObject:
    def find(self, block_name=None, block_type=None):
        """Depth-first search for the first block matching the given
        name and/or type; returns ``None`` when nothing matches."""
        # does this block itself satisfy the search criteria?
        if block_name and block_type:
            if isinstance(self, block_type):
                try:
                    if block_name == self.name:
                        return self
                except AttributeError:
                    # block has no name attribute, so it cannot match
                    pass
        elif block_name:
            try:
                if block_name == self.name:
                    return self
            except AttributeError:
                pass
        elif block_type:
            if isinstance(self, block_type):
                return self

        # no match here: recurse into referenced blocks
        for ref in self.get_refs():
            found = ref.find(block_name, block_type)
            if found:
                return found

        return None

    def find_chain(self, block, block_type=None):
        """Finds a chain of blocks going from C{self} to C{block}. If found,
        self is the first element and block is the last element. If no branch
        found, returns an empty list. Does not check whether there is more
        than one branch; if so, the first one found is returned.

        :param block: The block to find a chain to.
        :param block_type: The type that blocks should have in this chain."""
        if self is block:
            return [self]
        for ref in self.get_refs():
            # optionally restrict the chain to blocks of the given type
            if block_type and not isinstance(ref, block_type):
                continue
            tail = ref.find_chain(block, block_type)
            if tail:
                return [self] + tail
        return []

    def apply_scale(self, scale):
        """Scale data in this block. This implementation does nothing.
        Override this method if it contains geometry data that can be
        scaled.
        """
        pass

    def tree(self, block_type=None, follow_all=True, unique=False):
        """A generator for parsing all blocks in the tree (starting from and
        including C{self}).

        :param block_type: If not ``None``, yield only blocks of the type C{block_type}.
        :param follow_all: If C{block_type} is not ``None``, then if this is ``True`` the function will parse the whole tree. Otherwise, the function will not follow branches that start by a non-C{block_type} block.
        :param unique: Whether the generator can return the same block twice or not."""
        if unique:
            # reduce to the non-unique traversal, filtering duplicates
            seen = []
            for block in self.tree(block_type=block_type,
                                   follow_all=follow_all, unique=False):
                if block not in seen:
                    seen.append(block)
                    yield block
            return

        # yield self when it matches the filter (or when there is none)
        if not block_type:
            yield self
        elif isinstance(self, block_type):
            yield self
        elif not follow_all:
            # non-matching branch, and pruning was requested
            return

        # yield the subtree attached to each referenced block
        for ref in self.get_refs():
            for block in ref.tree(block_type=block_type,
                                  follow_all=follow_all):
                yield block

    def _validateTree(self):
        """Raises ValueError if there is a cycle in the tree."""
        # an acyclic reference tree visits every block exactly once;
        # a repeat visit therefore proves a cycle (and would otherwise
        # make the traversal infinite)
        visited = []
        for block in self.tree():
            if block in visited:
                raise ValueError('cyclic references detected')
            visited.append(block)

    def is_interchangeable(self, other):
        """Are the two blocks interchangeable?

        @todo: Rely on AnyType, SimpleType, ComplexType, etc. implementation.
        """
        if not isinstance(self,
                          (NifFormat.NiProperty, NifFormat.NiSourceTexture)):
            # blocks with references: quick identity check only
            return self is other
        # properties and source textures: compare by class and hash
        return ((self.__class__ is other.__class__)
                and (self.get_hash() == other.get_hash()))
5759
class NiMaterialProperty:
    def is_interchangeable(self, other):
        """Are the two material blocks interchangeable?

        The material name is ignored, unless it is one of a few special
        names that engines key shader effects on.
        """
        specialnames = ("envmap2", "envmap", "skin", "hair",
                        "dynalpha", "hidesecret", "lava")
        if self.__class__ is not other.__class__:
            return False
        name_matters = (self.name.lower() in specialnames
                        or other.name.lower() in specialnames)
        if name_matters:
            # compare the full hash, name included
            return self.get_hash() == other.get_hash()
        # compare the hash with the name component (first element) dropped
        return self.get_hash()[1:] == other.get_hash()[1:]
5774
class ATextureRenderData:
    def save_as_dds(self, stream):
        """Save image as DDS file.

        Builds a DDS header from this block's pixel format description and
        writes header plus pixel data to ``stream``.  Supports uncompressed
        RGB(A) as well as DXT1/DXT5 compressed formats; any other
        pixel_format raises ``ValueError``.

        :param stream: Writable (binary) stream to receive the DDS file.
        """
        # set up header and pixel data
        data = pyffi.formats.dds.DdsFormat.Data()
        header = data.header
        pixeldata = data.pixeldata

        # create header, depending on the format
        if self.pixel_format in (NifFormat.PixelFormat.PX_FMT_RGB8,
                                 NifFormat.PixelFormat.PX_FMT_RGBA8):
            # uncompressed RGB(A)
            header.flags.caps = 1
            header.flags.height = 1
            header.flags.width = 1
            header.flags.pixel_format = 1
            header.flags.mipmap_count = 1
            header.flags.linear_size = 1
            # dimensions are those of the top-level mipmap
            header.height = self.mipmaps[0].height
            header.width = self.mipmaps[0].width
            header.linear_size = len(self.pixel_data)
            header.mipmap_count = len(self.mipmaps)
            header.pixel_format.flags.rgb = 1
            header.pixel_format.bit_count = self.bits_per_pixel
            if not self.channels:
                # old-style data: channel masks stored directly on the block
                header.pixel_format.r_mask = self.red_mask
                header.pixel_format.g_mask = self.green_mask
                header.pixel_format.b_mask = self.blue_mask
                header.pixel_format.a_mask = self.alpha_mask
            else:
                # new-style data: derive each channel mask from the channel
                # bit widths, packed from the least significant bit upwards
                bit_pos = 0
                for i, channel in enumerate(self.channels):
                    mask = (2 ** channel.bits_per_channel - 1) << bit_pos
                    if channel.type == NifFormat.ChannelType.CHNL_RED:
                        header.pixel_format.r_mask = mask
                    elif channel.type == NifFormat.ChannelType.CHNL_GREEN:
                        header.pixel_format.g_mask = mask
                    elif channel.type == NifFormat.ChannelType.CHNL_BLUE:
                        header.pixel_format.b_mask = mask
                    elif channel.type == NifFormat.ChannelType.CHNL_ALPHA:
                        header.pixel_format.a_mask = mask
                    bit_pos += channel.bits_per_channel
            header.caps_1.complex = 1
            header.caps_1.texture = 1
            header.caps_1.mipmap = 1
            if self.pixel_data:
                # used in older nif versions
                pixeldata.set_value(self.pixel_data)
            else:
                # used in newer nif versions
                # NOTE(review): py2-era byte-string join; presumably
                # pixel_data_matrix holds str chunks — confirm on py3 ports
                pixeldata.set_value(''.join(self.pixel_data_matrix))
        elif self.pixel_format == NifFormat.PixelFormat.PX_FMT_DXT1:
            # format used in Megami Tensei: Imagine and Bully SE
            header.flags.caps = 1
            header.flags.height = 1
            header.flags.width = 1
            header.flags.pixel_format = 1
            header.flags.mipmap_count = 1
            header.flags.linear_size = 0
            header.height = self.mipmaps[0].height
            header.width = self.mipmaps[0].width
            header.linear_size = 0
            header.mipmap_count = len(self.mipmaps)
            # compressed formats use a FourCC code instead of channel masks
            header.pixel_format.flags.four_c_c = 1
            header.pixel_format.four_c_c = pyffi.formats.dds.DdsFormat.FourCC.DXT1
            header.pixel_format.bit_count = 0
            header.pixel_format.r_mask = 0
            header.pixel_format.g_mask = 0
            header.pixel_format.b_mask = 0
            header.pixel_format.a_mask = 0
            header.caps_1.complex = 1
            header.caps_1.texture = 1
            header.caps_1.mipmap = 1
            if isinstance(self,
                          NifFormat.NiPersistentSrcTextureRendererData):
                # persistent data stores pixel bytes as integers, so convert
                # each one back to a byte via chr (py2 semantics)
                pixeldata.set_value(
                    ''.join(
                        ''.join([chr(x) for x in tex])
                        for tex in self.pixel_data))
            else:
                pixeldata.set_value(''.join(self.pixel_data_matrix))
        elif self.pixel_format in (NifFormat.PixelFormat.PX_FMT_DXT5,
                                   NifFormat.PixelFormat.PX_FMT_DXT5_ALT):
            # format used in Megami Tensei: Imagine
            header.flags.caps = 1
            header.flags.height = 1
            header.flags.width = 1
            header.flags.pixel_format = 1
            header.flags.mipmap_count = 1
            header.flags.linear_size = 0
            header.height = self.mipmaps[0].height
            header.width = self.mipmaps[0].width
            header.linear_size = 0
            header.mipmap_count = len(self.mipmaps)
            header.pixel_format.flags.four_c_c = 1
            header.pixel_format.four_c_c = pyffi.formats.dds.DdsFormat.FourCC.DXT5
            header.pixel_format.bit_count = 0
            header.pixel_format.r_mask = 0
            header.pixel_format.g_mask = 0
            header.pixel_format.b_mask = 0
            header.pixel_format.a_mask = 0
            header.caps_1.complex = 1
            header.caps_1.texture = 1
            header.caps_1.mipmap = 1
            pixeldata.set_value(''.join(self.pixel_data_matrix))
        else:
            raise ValueError(
                "cannot save pixel format %i as DDS" % self.pixel_format)

        data.write(stream)
5885
class NiSkinData:
    def get_transform(self):
        """Return scale, rotation, and translation into a single 4x4 matrix."""
        transform = NifFormat.Matrix44()
        transform.set_scale_rotation_translation(
            self.scale, self.rotation, self.translation)
        return transform

    def set_transform(self, mat):
        """Set the scale, rotation, and translation from a 4x4 matrix."""
        scale, rotation, translation = mat.get_scale_rotation_translation()

        self.scale = scale

        # copy the 3x3 rotation component, entry by entry
        for entry in ("m_11", "m_12", "m_13",
                      "m_21", "m_22", "m_23",
                      "m_31", "m_32", "m_33"):
            setattr(self.rotation, entry, getattr(rotation, entry))

        # copy the translation component
        for axis in ("x", "y", "z"):
            setattr(self.translation, axis, getattr(translation, axis))

    def apply_scale(self, scale):
        """Apply scale factor on data.

        >>> from pyffi.formats.nif import NifFormat
        >>> id44 = NifFormat.Matrix44()
        >>> id44.set_identity()
        >>> skelroot = NifFormat.NiNode()
        >>> skelroot.name = 'Scene Root'
        >>> skelroot.set_transform(id44)
        >>> bone1 = NifFormat.NiNode()
        >>> bone1.name = 'bone1'
        >>> bone1.set_transform(id44)
        >>> bone1.translation.x = 10
        >>> skelroot.add_child(bone1)
        >>> geom = NifFormat.NiTriShape()
        >>> geom.set_transform(id44)
        >>> skelroot.add_child(geom)
        >>> skininst = NifFormat.NiSkinInstance()
        >>> geom.skin_instance = skininst
        >>> skininst.skeleton_root = skelroot
        >>> skindata = NifFormat.NiSkinData()
        >>> skininst.data = skindata
        >>> skindata.set_transform(id44)
        >>> geom.add_bone(bone1, {})
        >>> geom.update_bind_position()
        >>> bone1.translation.x
        10.0
        >>> skindata.bone_list[0].translation.x
        -10.0
        >>> import pyffi.spells.nif.fix
        >>> import pyffi.spells.nif
        >>> data = NifFormat.Data()
        >>> data.roots = [skelroot]
        >>> toaster = pyffi.spells.nif.NifToaster()
        >>> toaster.scale = 0.1
        >>> pyffi.spells.nif.fix.SpellScale(data=data, toaster=toaster).recurse()
        pyffi.toaster:INFO:--- fix_scale ---
        pyffi.toaster:INFO:  scaling by factor 0.100000
        pyffi.toaster:INFO:  ~~~ NiNode [Scene Root] ~~~
        pyffi.toaster:INFO:    ~~~ NiNode [bone1] ~~~
        pyffi.toaster:INFO:    ~~~ NiTriShape [] ~~~
        pyffi.toaster:INFO:      ~~~ NiSkinInstance [] ~~~
        pyffi.toaster:INFO:        ~~~ NiSkinData [] ~~~
        >>> bone1.translation.x
        1.0
        >>> skindata.bone_list[0].translation.x
        -1.0
        """
        # scale the global skin translation
        for axis in ("x", "y", "z"):
            setattr(self.translation, axis,
                    getattr(self.translation, axis) * scale)

        # scale every per-bone translation, bounding sphere offset and radius
        for bonedata in self.bone_list:
            for axis in ("x", "y", "z"):
                setattr(bonedata.translation, axis,
                        getattr(bonedata.translation, axis) * scale)
                setattr(bonedata.bounding_sphere_offset, axis,
                        getattr(bonedata.bounding_sphere_offset, axis) * scale)
            bonedata.bounding_sphere_radius *= scale
5974
class NiTransformInterpolator:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # a uniform scale only affects the translation component
        for axis in ("x", "y", "z"):
            setattr(self.translation, axis,
                    getattr(self.translation, axis) * scale)
5982
class NiTriBasedGeomData:
    def is_interchangeable(self, other):
        """Heuristically checks if two NiTriBasedGeomData blocks describe
        the same geometry, that is, if they can be used interchangeably in
        a nif file without affecting the rendering. The check is not fool
        proof but has shown to work in most practical cases.

        :param other: Another geometry data block.
        :type other: L{NifFormat.NiTriBasedGeomData} (if it has another type
            then the function will always return ``False``)
        :return: ``True`` if the geometries are equivalent, ``False`` otherwise.
        """
        # identical objects are trivially interchangeable
        if self is other:
            return True

        # must be tri-based geometry data
        if not isinstance(other, NifFormat.NiTriBasedGeomData):
            return False

        # classes must match exactly (isinstance both ways)
        if (not isinstance(self, other.__class__)
            or not isinstance(other, self.__class__)):
            return False

        # cheap attribute comparison first
        for attribute in (
            "num_vertices", "keep_flags", "compress_flags", "has_vertices",
            "num_uv_sets", "has_normals", "center", "radius",
            "has_vertex_colors", "has_uv", "consistency_flags"):
            if getattr(self, attribute) != getattr(other, attribute):
                return False

        # vertex hashes (covers uvs, vcols and normals) must coincide as
        # unordered collections
        hashes_self = list(self.get_vertex_hash_generator())
        hashes_other = list(other.get_vertex_hash_generator())
        if not all(h in hashes_other for h in hashes_self):
            return False
        if not all(h in hashes_self for h in hashes_other):
            return False

        # triangles, expressed in hash space, must coincide too
        tris_self = [tuple(hashes_self[i] for i in tri)
                     for tri in self.get_triangles()]
        tris_other = [tuple(hashes_other[i] for i in tri)
                      for tri in other.get_triangles()]
        if not all(tri in tris_other for tri in tris_self):
            return False
        if not all(tri in tris_self for tri in tris_other):
            return False

        # looks pretty identical!
        return True

    def get_triangle_indices(self, triangles):
        """Yield list of triangle indices (relative to
        self.get_triangles()) of given triangles. Degenerate triangles in
        the list are assigned index ``None``.

        >>> from pyffi.formats.nif import NifFormat
        >>> geomdata = NifFormat.NiTriShapeData()
        >>> geomdata.set_triangles([(0,1,2),(1,2,3),(2,3,4)])
        >>> list(geomdata.get_triangle_indices([(1,2,3)]))
        [1]
        >>> list(geomdata.get_triangle_indices([(3,1,2)]))
        [1]
        >>> list(geomdata.get_triangle_indices([(2,3,1)]))
        [1]
        >>> list(geomdata.get_triangle_indices([(1,2,0),(4,2,3)]))
        [0, 2]
        >>> list(geomdata.get_triangle_indices([(0,0,0),(4,2,3)]))
        [None, 2]
        >>> list(geomdata.get_triangle_indices([(0,3,4),(4,2,3)])) # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...
        >>>

        :param triangles: An iterable of triangles to check.
        :type triangles: iterator or list of tuples of three ints
        """
        def canonical_hash(tri):
            """Hash of the cyclic rotation that puts the strictly smallest
            vertex first; ``None`` when the triangle is degenerate (no
            strictly smallest vertex)."""
            v0, v1, v2 = tri
            if v0 < v1 and v0 < v2:
                return hash((v0, v1, v2))
            if v1 < v0 and v1 < v2:
                return hash((v1, v2, v0))
            if v2 < v0 and v2 < v1:
                return hash((v2, v0, v1))
            return None

        # canonical hashes of all triangles in this geometry
        own_hashes = [canonical_hash(tri) for tri in self.get_triangles()]

        # map each given triangle to its index (ValueError if not present)
        for tri in triangles:
            key = canonical_hash(tri)
            if key is None:
                yield None
            else:
                yield own_hashes.index(key)
6089
6090 - class NiTriBasedGeom:
def get_tangent_space(self):
    """Return iterator over normal, tangent, bitangent vectors.
    If the block has no tangent space, then returns None.

    Tangents and bitangents are taken from the geometry data when present;
    otherwise they are decoded from the Oblivion-style
    'Tangent space (binormal & tangent vectors)' binary extra data block.
    Returns () when there are no vertices at all.
    """

    def bytes2vectors(data, pos, num):
        # decode num little-endian float triples from data, starting at pos
        # NOTE(review): py2-era code — indexing a str yields 1-char strings
        # that are joined and unpacked; confirm behavior on py3 ports
        for i in xrange(num):
            # data[pos:pos+12] is not really well implemented, so do this
            vecdata = ''.join(data[j] for j in xrange(pos, pos + 12))
            vec = NifFormat.Vector3()
            # XXX _byte_order! assuming little endian
            vec.x, vec.y, vec.z = struct.unpack('<fff', vecdata)
            yield vec
            pos += 12

    # no vertices: empty tangent space
    if self.data.num_vertices == 0:
        return ()

    if not self.data.normals:
        #raise ValueError('geometry has no normals')
        return None

    if (not self.data.tangents) or (not self.data.bitangents):
        # no tangents and bitangents at the usual location
        # perhaps there is Oblivion style data?
        for extra in self.get_extra_datas():
            if isinstance(extra, NifFormat.NiBinaryExtraData):
                if extra.name == 'Tangent space (binormal & tangent vectors)':
                    break
        else:
            # for/else: loop completed without break, so no tangent extra
            #raise ValueError('geometry has no tangents')
            return None
        # layout: num_vertices tangents followed by num_vertices bitangents,
        # 12 bytes (3 floats) each
        if 24 * self.data.num_vertices != len(extra.binary_data):
            raise ValueError(
                'tangent space data has invalid size, expected %i bytes but got %i'
                % (24 * self.data.num_vertices, len(extra.binary_data)))
        tangents = bytes2vectors(extra.binary_data,
                                 0,
                                 self.data.num_vertices)
        bitangents = bytes2vectors(extra.binary_data,
                                   12 * self.data.num_vertices,
                                   self.data.num_vertices)
    else:
        # Fallout 3 style: tangent space stored directly in the data block
        tangents = self.data.tangents
        bitangents = self.data.bitangents

    return izip(self.data.normals, tangents, bitangents)
6139
def update_tangent_space(
    self, as_extra=None,
    vertexprecision=3, normalprecision=3):
    """Recalculate tangent space data.

    Tangents and binormals are accumulated per (vertex, normal) pair over
    all triangles, then orthonormalized against the vertex normal via
    Gram-Schmidt, and finally stored either as a binary extra data block
    (Oblivion style) or in the geometry data's tangents/bitangents arrays
    (Fallout 3 style).

    :param as_extra: Whether to store the tangent space data as extra data
        (as in Oblivion) or not (as in Fallout 3). If not set, switches to
        Oblivion if an extra data block is found, otherwise does default.
        Set it to override this detection (for example when using this
        function to create tangent space data) and force behaviour.
    :param vertexprecision: Rounding precision for vertex positions when
        identifying duplicated vertices.
    :param normalprecision: Rounding precision for normals when identifying
        duplicated vertices.
    """
    # check that self.data exists and is valid
    if not isinstance(self.data, NifFormat.NiTriBasedGeomData):
        raise ValueError(
            'cannot update tangent space of a geometry with %s data'
            %(self.data.__class__ if self.data else 'no'))

    verts = self.data.vertices
    norms = self.data.normals
    if len(self.data.uv_sets) > 0:
        uvs = self.data.uv_sets[0]
    else:
        # no uv sets so no tangent space
        # we clear the tangents space flag just
        # happens in Fallout NV
        # meshes/architecture/bouldercity/arcadeendl.nif
        # (see issue #3218751)
        self.data.num_uv_sets &= ~4096
        self.data.bs_num_uv_sets &= ~4096
        return

    # check that shape has norms and uvs
    if len(uvs) == 0 or len(norms) == 0: return

    # identify identical (vertex, normal) pairs to avoid issues along
    # uv seams due to vertex duplication
    # implementation note: uvprecision and vcolprecision 0
    # should be enough, but use -2 just to be really sure
    # that this is ignored
    v_hash_map = list(
        self.data.get_vertex_hash_generator(
            vertexprecision=vertexprecision,
            normalprecision=normalprecision,
            uvprecision=-2,
            vcolprecision=-2))

    # tangent and binormal dictionaries by vertex hash
    bin = dict((h, NifFormat.Vector3()) for h in v_hash_map)
    tan = dict((h, NifFormat.Vector3()) for h in v_hash_map)

    # calculate tangents and binormals from vertex and texture coordinates
    for t1, t2, t3 in self.data.get_triangles():
        # find hash values
        h1 = v_hash_map[t1]
        h2 = v_hash_map[t2]
        h3 = v_hash_map[t3]
        # skip degenerate triangles
        if h1 == h2 or h2 == h3 or h3 == h1:
            continue

        v_1 = verts[t1]
        v_2 = verts[t2]
        v_3 = verts[t3]
        w1 = uvs[t1]
        w2 = uvs[t2]
        w3 = uvs[t3]
        v_2v_1 = v_2 - v_1
        v_3v_1 = v_3 - v_1
        w2w1 = w2 - w1
        w3w1 = w3 - w1

        # surface of triangle in texture space
        r = w2w1.u * w3w1.v - w3w1.u * w2w1.v

        # sign of surface
        r_sign = (1 if r >= 0 else -1)

        # contribution of this triangle to tangents and binormals
        sdir = NifFormat.Vector3()
        sdir.x = w3w1.v * v_2v_1.x - w2w1.v * v_3v_1.x
        sdir.y = w3w1.v * v_2v_1.y - w2w1.v * v_3v_1.y
        sdir.z = w3w1.v * v_2v_1.z - w2w1.v * v_3v_1.z
        sdir *= r_sign
        try:
            sdir.normalize()
        except ZeroDivisionError: # catches zero vector
            continue # skip triangle
        except ValueError: # catches invalid data
            continue # skip triangle

        tdir = NifFormat.Vector3()
        tdir.x = w2w1.u * v_3v_1.x - w3w1.u * v_2v_1.x
        tdir.y = w2w1.u * v_3v_1.y - w3w1.u * v_2v_1.y
        tdir.z = w2w1.u * v_3v_1.z - w3w1.u * v_2v_1.z
        tdir *= r_sign
        try:
            tdir.normalize()
        except ZeroDivisionError: # catches zero vector
            continue # skip triangle
        except ValueError: # catches invalid data
            continue # skip triangle

        # vector combination algorithm could possibly be improved
        for h in [h1, h2, h3]:
            tan[h] += tdir
            bin[h] += sdir

    xvec = NifFormat.Vector3()
    xvec.x = 1.0
    xvec.y = 0.0
    xvec.z = 0.0
    yvec = NifFormat.Vector3()
    yvec.x = 0.0
    yvec.y = 1.0
    yvec.z = 0.0
    for n, h in izip(norms, v_hash_map):
        try:
            n.normalize()
        except (ValueError, ZeroDivisionError):
            # this happens if the normal has NAN values or is zero
            # just pick something in that case
            n = yvec
        try:
            # turn n, bin, tan into a base via Gram-Schmidt
            bin[h] -= n * (n * bin[h])
            bin[h].normalize()
            tan[h] -= n * (n * tan[h])
            tan[h] -= bin[h] * (bin[h] * tan[h])
            tan[h].normalize()
        except ZeroDivisionError:
            # insuffient data to set tangent space for this vertex
            # in that case pick a space
            bin[h] = xvec.crossproduct(n)
            try:
                bin[h].normalize()
            except ZeroDivisionError:
                bin[h] = yvec.crossproduct(n)
                bin[h].normalize() # should work now
            tan[h] = n.crossproduct(bin[h])

    # tangent and binormal lists by vertex index
    tan = [tan[h] for h in v_hash_map]
    bin = [bin[h] for h in v_hash_map]

    # find possible extra data block
    for extra in self.get_extra_datas():
        if isinstance(extra, NifFormat.NiBinaryExtraData):
            if extra.name == 'Tangent space (binormal & tangent vectors)':
                break
    else:
        # for/else: loop finished without break, so no such block
        extra = None

    # if autodetection is on, do as_extra only if an extra data block is found
    if as_extra is None:
        if extra:
            as_extra = True
        else:
            as_extra = False

    if as_extra:
        # if tangent space extra data already exists, use it
        if not extra:
            # otherwise, create a new block and link it
            extra = NifFormat.NiBinaryExtraData()
            extra.name = 'Tangent space (binormal & tangent vectors)'
            self.add_extra_data(extra)

        # write the data
        # NOTE(review): py2-era byte-string accumulation; struct.pack
        # returns bytes on py3 — confirm on py3 ports
        binarydata = ""
        for vec in tan + bin:
            # XXX _byte_order!! assuming little endian
            binarydata += struct.pack('<fff', vec.x, vec.y, vec.z)
        extra.binary_data = binarydata
    else:
        # set tangent space flag
        # XXX used to be 61440
        # XXX from Sid Meier's Railroad & Fallout 3 nifs, 4096 is
        # XXX sufficient?
        self.data.num_uv_sets |= 4096
        self.data.bs_num_uv_sets |= 4096
        self.data.tangents.update_size()
        self.data.bitangents.update_size()
        for vec, data_tan in izip(tan, self.data.tangents):
            data_tan.x = vec.x
            data_tan.y = vec.y
            data_tan.z = vec.z
        for vec, data_bitan in izip(bin, self.data.bitangents):
            data_bitan.x = vec.x
            data_bitan.y = vec.y
            data_bitan.z = vec.z

# ported from nifskope/skeleton.cpp:spSkinPartition
6332 - def update_skin_partition(self, 6333 maxbonesperpartition=4, maxbonespervertex=4, 6334 verbose=0, stripify=True, stitchstrips=False, 6335 padbones=False, 6336 triangles=None, trianglepartmap=None, 6337 maximize_bone_sharing=False):
6338 """Recalculate skin partition data. 6339 6340 :deprecated: Do not use the verbose argument. 6341 :param maxbonesperpartition: Maximum number of bones in each partition. 6342 The num_bones field will not exceed this number. 6343 :param maxbonespervertex: Maximum number of bones per vertex. 6344 The num_weights_per_vertex field will be exactly equal to this number. 6345 :param verbose: Ignored, and deprecated. Set pyffi's log level instead. 6346 :param stripify: If true, stripify the partitions, otherwise use triangles. 6347 :param stitchstrips: If stripify is true, then set this to true to stitch 6348 the strips. 6349 :param padbones: Enforces the numbones field to be equal to 6350 maxbonesperpartition. Also ensures that the bone indices are unique 6351 and sorted, per vertex. Raises an exception if maxbonespervertex 6352 is not equal to maxbonesperpartition (in that case bone indices cannot 6353 be unique and sorted). This options is required for Freedom Force vs. 6354 the 3rd Reich skin partitions. 6355 :param triangles: The triangles of the partition (if not specified, then 6356 this defaults to C{self.data.get_triangles()}. 6357 :param trianglepartmap: Maps each triangle to a partition index. Faces with 6358 different indices will never appear in the same partition. If the skin 6359 instance is a BSDismemberSkinInstance, then these indices are used as 6360 body part types, and the partitions in the BSDismemberSkinInstance are 6361 updated accordingly. Note that the faces are counted relative to 6362 L{triangles}. 6363 :param maximize_bone_sharing: Maximize bone sharing between partitions. 6364 This option is useful for Fallout 3. 
6365 """ 6366 logger = logging.getLogger("pyffi.nif.nitribasedgeom") 6367 6368 # if trianglepartmap not specified, map everything to index 0 6369 if trianglepartmap is None: 6370 trianglepartmap = repeat(0) 6371 6372 # shortcuts relevant blocks 6373 if not self.skin_instance: 6374 # no skin, nothing to do 6375 return 6376 self._validate_skin() 6377 geomdata = self.data 6378 skininst = self.skin_instance 6379 skindata = skininst.data 6380 6381 # get skindata vertex weights 6382 logger.debug("Getting vertex weights.") 6383 weights = self.get_vertex_weights() 6384 6385 # count minimum and maximum number of bones per vertex 6386 minbones = min(len(weight) for weight in weights) 6387 maxbones = max(len(weight) for weight in weights) 6388 if minbones <= 0: 6389 noweights = [v for v, weight in enumerate(weights) 6390 if not weight] 6391 #raise ValueError( 6392 logger.warn( 6393 'bad NiSkinData: some vertices have no weights %s' 6394 % noweights) 6395 logger.info("Counted minimum of %i and maximum of %i bones per vertex" 6396 % (minbones, maxbones)) 6397 6398 # reduce bone influences to meet maximum number of bones per vertex 6399 logger.info("Imposing maximum of %i bones per vertex." % maxbonespervertex) 6400 lostweight = 0.0 6401 for weight in weights: 6402 if len(weight) > maxbonespervertex: 6403 # delete bone influences with least weight 6404 weight.sort(key=lambda x: x[1], reverse=True) # sort by weight 6405 # save lost weight to return to user 6406 lostweight = max( 6407 lostweight, max( 6408 [x[1] for x in weight[maxbonespervertex:]])) 6409 del weight[maxbonespervertex:] # only keep first elements 6410 # normalize 6411 totalweight = sum([x[1] for x in weight]) # sum of all weights 6412 for x in weight: x[1] /= totalweight 6413 maxbones = maxbonespervertex 6414 # sort by again by bone (relied on later when matching vertices) 6415 weight.sort(key=lambda x: x[0]) 6416 6417 # reduce bone influences to meet maximum number of bones per partition 6418 # (i.e. 
maximum number of bones per triangle) 6419 logger.info( 6420 "Imposing maximum of %i bones per triangle (and hence, per partition)." 6421 % maxbonesperpartition) 6422 6423 if triangles is None: 6424 triangles = geomdata.get_triangles() 6425 6426 for tri in triangles: 6427 while True: 6428 # find the bones influencing this triangle 6429 tribones = [] 6430 for t in tri: 6431 tribones.extend([bonenum for bonenum, boneweight in weights[t]]) 6432 tribones = set(tribones) 6433 # target met? 6434 if len(tribones) <= maxbonesperpartition: 6435 break 6436 # no, need to remove a bone 6437 6438 # sum weights for each bone to find the one that least influences 6439 # this triangle 6440 tribonesweights = {} 6441 for bonenum in tribones: tribonesweights[bonenum] = 0.0 6442 nono = set() # bones with weight 1 cannot be removed 6443 for skinweights in [weights[t] for t in tri]: 6444 # skinweights[0] is the first skinweight influencing vertex t 6445 # and skinweights[0][0] is the bone number of that bone 6446 if len(skinweights) == 1: nono.add(skinweights[0][0]) 6447 for bonenum, boneweight in skinweights: 6448 tribonesweights[bonenum] += boneweight 6449 6450 # select a bone to remove 6451 # first find bones we can remove 6452 6453 # restrict to bones not in the nono set 6454 tribonesweights = [ 6455 x for x in tribonesweights.items() if x[0] not in nono] 6456 if not tribonesweights: 6457 raise ValueError( 6458 "cannot remove anymore bones in this skin; " 6459 "increase maxbonesperpartition and try again") 6460 # sort by vertex weight sum the last element of this list is now a 6461 # candidate for removal 6462 tribonesweights.sort(key=lambda x: x[1], reverse=True) 6463 minbone = tribonesweights[-1][0] 6464 6465 # remove minbone from all vertices of this triangle and from all 6466 # matching vertices 6467 for t in tri: 6468 for tt in [t]: #match[t]: 6469 # remove bone 6470 weight = weights[tt] 6471 for i, (bonenum, boneweight) in enumerate(weight): 6472 if bonenum == minbone: 6473 # 
save lost weight to return to user 6474 lostweight = max(lostweight, boneweight) 6475 del weight[i] 6476 break 6477 else: 6478 continue 6479 # normalize 6480 totalweight = sum([x[1] for x in weight]) 6481 for x in weight: 6482 x[1] /= totalweight 6483 6484 # split triangles into partitions 6485 logger.info("Creating partitions") 6486 parts = [] 6487 # keep creating partitions as long as there are triangles left 6488 while triangles: 6489 # create a partition 6490 part = [set(), [], None] # bones, triangles, partition index 6491 usedverts = set() 6492 addtriangles = True 6493 # keep adding triangles to it as long as the flag is set 6494 while addtriangles: 6495 # newtriangles is a list of triangles that have not been added to 6496 # the partition, similar for newtrianglepartmap 6497 newtriangles = [] 6498 newtrianglepartmap = [] 6499 for tri, partindex in izip(triangles, trianglepartmap): 6500 # find the bones influencing this triangle 6501 tribones = [] 6502 for t in tri: 6503 tribones.extend([ 6504 bonenum for bonenum, boneweight in weights[t]]) 6505 tribones = set(tribones) 6506 # if part has no bones, 6507 # or if part has all bones of tribones and index coincides 6508 # then add this triangle to this part 6509 if ((not part[0]) 6510 or ((part[0] >= tribones) and (part[2] == partindex))): 6511 part[0] |= tribones 6512 part[1].append(tri) 6513 usedverts |= set(tri) 6514 # if part was empty, assign it the index 6515 if part[2] is None: 6516 part[2] = partindex 6517 else: 6518 newtriangles.append(tri) 6519 newtrianglepartmap.append(partindex) 6520 triangles = newtriangles 6521 trianglepartmap = newtrianglepartmap 6522 6523 # if we have room left in the partition 6524 # then add adjacent triangles 6525 addtriangles = False 6526 newtriangles = [] 6527 newtrianglepartmap = [] 6528 if len(part[0]) < maxbonesperpartition: 6529 for tri, partindex in izip(triangles, trianglepartmap): 6530 # if triangle is adjacent, and has same index 6531 # then check if it can be added 
to the partition 6532 if (usedverts & set(tri)) and (part[2] == partindex): 6533 # find the bones influencing this triangle 6534 tribones = [] 6535 for t in tri: 6536 tribones.extend([ 6537 bonenum for bonenum, boneweight in weights[t]]) 6538 tribones = set(tribones) 6539 # and check if we exceed the maximum number of allowed 6540 # bones 6541 if len(part[0] | tribones) <= maxbonesperpartition: 6542 part[0] |= tribones 6543 part[1].append(tri) 6544 usedverts |= set(tri) 6545 # signal another try in adding triangles to 6546 # the partition 6547 addtriangles = True 6548 else: 6549 newtriangles.append(tri) 6550 newtrianglepartmap.append(partindex) 6551 else: 6552 newtriangles.append(tri) 6553 newtrianglepartmap.append(partindex) 6554 triangles = newtriangles 6555 trianglepartmap = newtrianglepartmap 6556 6557 parts.append(part) 6558 6559 logger.info("Created %i small partitions." % len(parts)) 6560 6561 # merge all partitions 6562 logger.info("Merging partitions.") 6563 merged = True # signals success, in which case do another run 6564 while merged: 6565 merged = False 6566 # newparts is to contain the updated merged partitions as we go 6567 newparts = [] 6568 # addedparts is the set of all partitions from parts that have been 6569 # added to newparts 6570 addedparts = set() 6571 # try all combinations 6572 for a, parta in enumerate(parts): 6573 if a in addedparts: 6574 continue 6575 newparts.append(parta) 6576 addedparts.add(a) 6577 for b, partb in enumerate(parts): 6578 if b <= a: 6579 continue 6580 if b in addedparts: 6581 continue 6582 # if partition indices are the same, and bone limit is not 6583 # exceeded, merge them 6584 if ((parta[2] == partb[2]) 6585 and (len(parta[0] | partb[0]) <= maxbonesperpartition)): 6586 parta[0] |= partb[0] 6587 parta[1] += partb[1] 6588 addedparts.add(b) 6589 merged = True # signal another try in merging partitions 6590 # update partitions to the merged partitions 6591 parts = newparts 6592 6593 # write the NiSkinPartition 6594 
logger.info("Skin has %i partitions." % len(parts)) 6595 6596 # if skin partition already exists, use it 6597 if skindata.skin_partition != None: 6598 skinpart = skindata.skin_partition 6599 skininst.skin_partition = skinpart 6600 elif skininst.skin_partition != None: 6601 skinpart = skininst.skin_partition 6602 skindata.skin_partition = skinpart 6603 else: 6604 # otherwise, create a new block and link it 6605 skinpart = NifFormat.NiSkinPartition() 6606 skindata.skin_partition = skinpart 6607 skininst.skin_partition = skinpart 6608 6609 # set number of partitions 6610 skinpart.num_skin_partition_blocks = len(parts) 6611 skinpart.skin_partition_blocks.update_size() 6612 6613 # maximize bone sharing, if requested 6614 if maximize_bone_sharing: 6615 logger.info("Maximizing shared bones.") 6616 # new list of partitions, sorted to maximize bone sharing 6617 newparts = [] 6618 # as long as there are parts to add 6619 while parts: 6620 # current set of partitions with shared bones 6621 # starts a new set of partitions with shared bones 6622 sharedparts = [parts.pop()] 6623 sharedboneset = sharedparts[0][0] 6624 # go over all other partitions, and try to add them with 6625 # shared bones 6626 oldparts = parts[:] 6627 parts = [] 6628 for otherpart in oldparts: 6629 # check if bones can be added 6630 if len(sharedboneset | otherpart[0]) <= maxbonesperpartition: 6631 # ok, we can share bones! 
6632 # update set of shared bones 6633 sharedboneset |= otherpart[0] 6634 # add this other partition to list of shared parts 6635 sharedparts.append(otherpart) 6636 # update bone set in all shared parts 6637 for sharedpart in sharedparts: 6638 sharedpart[0] = sharedboneset 6639 else: 6640 # not added to sharedparts, 6641 # so we must keep it for the next iteration 6642 parts.append(otherpart) 6643 # update list of partitions 6644 newparts.extend(sharedparts) 6645 6646 # store update 6647 parts = newparts 6648 6649 # for Fallout 3, set dismember partition indices 6650 if isinstance(skininst, NifFormat.BSDismemberSkinInstance): 6651 skininst.num_partitions = len(parts) 6652 skininst.partitions.update_size() 6653 lastpart = None 6654 for bodypart, part in izip(skininst.partitions, parts): 6655 bodypart.body_part = part[2] 6656 if (lastpart is None) or (lastpart[0] != part[0]): 6657 # start new bone set, if bones are not shared 6658 bodypart.part_flag.start_new_boneset = 1 6659 else: 6660 # do not start new bone set 6661 bodypart.part_flag.start_new_boneset = 0 6662 # caps are invisible 6663 bodypart.part_flag.editor_visible = (part[2] < 100 6664 or part[2] >= 1000) 6665 # store part for next iteration 6666 lastpart = part 6667 6668 for skinpartblock, part in zip(skinpart.skin_partition_blocks, parts): 6669 # get sorted list of bones 6670 bones = sorted(list(part[0])) 6671 triangles = part[1] 6672 logger.info("Optimizing triangle ordering in partition %i" 6673 % parts.index(part)) 6674 # optimize triangles for vertex cache and calculate strips 6675 triangles = pyffi.utils.vertex_cache.get_cache_optimized_triangles( 6676 triangles) 6677 strips = pyffi.utils.vertex_cache.stable_stripify( 6678 triangles, stitchstrips=stitchstrips) 6679 triangles_size = 3 * len(triangles) 6680 strips_size = len(strips) + sum(len(strip) for strip in strips) 6681 vertices = [] 6682 # decide whether to use strip or triangles as primitive 6683 if stripify is None: 6684 stripifyblock = ( 6685 
strips_size < triangles_size 6686 and all(len(strip) < 65536 for strip in strips)) 6687 else: 6688 stripifyblock = stripify 6689 if stripifyblock: 6690 # stripify the triangles 6691 # also update triangle list 6692 numtriangles = 0 6693 # calculate number of triangles and get sorted 6694 # list of vertices 6695 # for optimal performance, vertices must be sorted 6696 # by strip 6697 for strip in strips: 6698 numtriangles += len(strip) - 2 6699 for t in strip: 6700 if t not in vertices: 6701 vertices.append(t) 6702 else: 6703 numtriangles = len(triangles) 6704 # get sorted list of vertices 6705 # for optimal performance, vertices must be sorted 6706 # by triangle 6707 for tri in triangles: 6708 for t in tri: 6709 if t not in vertices: 6710 vertices.append(t) 6711 # set all the data 6712 skinpartblock.num_vertices = len(vertices) 6713 skinpartblock.num_triangles = numtriangles 6714 if not padbones: 6715 skinpartblock.num_bones = len(bones) 6716 else: 6717 if maxbonesperpartition != maxbonespervertex: 6718 raise ValueError( 6719 "when padding bones maxbonesperpartition must be " 6720 "equal to maxbonespervertex") 6721 # freedom force vs. 
the 3rd reich needs exactly 4 bones per 6722 # partition on every partition block 6723 skinpartblock.num_bones = maxbonesperpartition 6724 if stripifyblock: 6725 skinpartblock.num_strips = len(strips) 6726 else: 6727 skinpartblock.num_strips = 0 6728 # maxbones would be enough as num_weights_per_vertex but the Gamebryo 6729 # engine doesn't like that, it seems to want exactly 4 even if there 6730 # are fewer 6731 skinpartblock.num_weights_per_vertex = maxbonespervertex 6732 skinpartblock.bones.update_size() 6733 for i, bonenum in enumerate(bones): 6734 skinpartblock.bones[i] = bonenum 6735 for i in xrange(len(bones), skinpartblock.num_bones): 6736 skinpartblock.bones[i] = 0 # dummy bone slots refer to first bone 6737 skinpartblock.has_vertex_map = True 6738 skinpartblock.vertex_map.update_size() 6739 for i, v in enumerate(vertices): 6740 skinpartblock.vertex_map[i] = v 6741 skinpartblock.has_vertex_weights = True 6742 skinpartblock.vertex_weights.update_size() 6743 for i, v in enumerate(vertices): 6744 for j in xrange(skinpartblock.num_weights_per_vertex): 6745 if j < len(weights[v]): 6746 skinpartblock.vertex_weights[i][j] = weights[v][j][1] 6747 else: 6748 skinpartblock.vertex_weights[i][j] = 0.0 6749 if stripifyblock: 6750 skinpartblock.has_faces = True 6751 skinpartblock.strip_lengths.update_size() 6752 for i, strip in enumerate(strips): 6753 skinpartblock.strip_lengths[i] = len(strip) 6754 skinpartblock.strips.update_size() 6755 for i, strip in enumerate(strips): 6756 for j, v in enumerate(strip): 6757 skinpartblock.strips[i][j] = vertices.index(v) 6758 else: 6759 skinpartblock.has_faces = True 6760 # clear strip lengths array 6761 skinpartblock.strip_lengths.update_size() 6762 # clear strips array 6763 skinpartblock.strips.update_size() 6764 skinpartblock.triangles.update_size() 6765 for i, (v_1,v_2,v_3) in enumerate(triangles): 6766 skinpartblock.triangles[i].v_1 = vertices.index(v_1) 6767 skinpartblock.triangles[i].v_2 = vertices.index(v_2) 6768 
skinpartblock.triangles[i].v_3 = vertices.index(v_3) 6769 skinpartblock.has_bone_indices = True 6770 skinpartblock.bone_indices.update_size() 6771 for i, v in enumerate(vertices): 6772 # the boneindices set keeps track of indices that have not been 6773 # used yet 6774 boneindices = set(range(skinpartblock.num_bones)) 6775 for j in xrange(len(weights[v])): 6776 skinpartblock.bone_indices[i][j] = bones.index(weights[v][j][0]) 6777 boneindices.remove(skinpartblock.bone_indices[i][j]) 6778 for j in xrange(len(weights[v]),skinpartblock.num_weights_per_vertex): 6779 if padbones: 6780 # if padbones is True then we have enforced 6781 # num_bones == num_weights_per_vertex so this will not trigger 6782 # a KeyError 6783 skinpartblock.bone_indices[i][j] = boneindices.pop() 6784 else: 6785 skinpartblock.bone_indices[i][j] = 0 6786 6787 # sort weights 6788 for i, v in enumerate(vertices): 6789 vweights = [] 6790 for j in xrange(skinpartblock.num_weights_per_vertex): 6791 vweights.append([ 6792 skinpartblock.bone_indices[i][j], 6793 skinpartblock.vertex_weights[i][j]]) 6794 if padbones: 6795 # by bone index (for ffvt3r) 6796 vweights.sort(key=lambda w: w[0]) 6797 else: 6798 # by weight (for fallout 3, largest weight first) 6799 vweights.sort(key=lambda w: -w[1]) 6800 for j in xrange(skinpartblock.num_weights_per_vertex): 6801 skinpartblock.bone_indices[i][j] = vweights[j][0] 6802 skinpartblock.vertex_weights[i][j] = vweights[j][1] 6803 6804 return lostweight
6805 6806 # ported from nifskope/skeleton.cpp:spFixBoneBounds
def update_skin_center_radius(self):
    """Recompute the bounding sphere (center and radius) of every bone
    in this geometry's skin data.

    Does nothing when the geometry has no skin instance.
    Ported from nifskope/skeleton.cpp:spFixBoneBounds.
    """
    # unskinned geometry: nothing to update
    if not self.skin_instance:
        return
    self._validate_skin()
    skin_data = self.skin_instance.data

    vertices = self.data.vertices

    for bone_block in skin_data.bone_list:
        # collect all vertices influenced by this bone
        influenced = [vertices[skin_weight.index]
                      for skin_weight in bone_block.vertex_weights]

        # axis-aligned bounding box of the influenced vertices
        lo = NifFormat.Vector3()
        hi = NifFormat.Vector3()
        for axis in ("x", "y", "z"):
            coords = [getattr(vert, axis) for vert in influenced]
            setattr(lo, axis, min(coords))
            setattr(hi, axis, max(coords))

        # the sphere center is the center of the bounding box
        center = (lo + hi) * 0.5

        # radius: distance from the center to the farthest vertex
        max_dist_sq = 0.0
        for vert in influenced:
            delta = center - vert
            max_dist_sq = max(
                max_dist_sq,
                delta.x * delta.x + delta.y * delta.y + delta.z * delta.z)
        radius = max_dist_sq ** 0.5

        # express the center in bone coordinates
        # (the radius is unaffected by this transform)
        center *= bone_block.get_transform()

        # store the result on the bone block
        bone_block.bounding_sphere_offset.x = center.x
        bone_block.bounding_sphere_offset.y = center.y
        bone_block.bounding_sphere_offset.z = center.z
        bone_block.bounding_sphere_radius = radius
6853
def get_interchangeable_tri_shape(self, triangles=None):
    """Return a NiTriShape block that is geometrically interchangeable
    with this geometry.

    :param triangles: Optional triangle list to store in the new shape;
        when omitted, the triangles of the original shape are used.
    """
    # clone this shape as a NiTriShape
    # (going via NiTriBasedGeom strips off subclass-specific data)
    shape = NifFormat.NiTriShape().deepcopy(
        NifFormat.NiTriBasedGeom().deepcopy(self))
    # clone the geometry data without strip information
    shape_data = NifFormat.NiTriShapeData().deepcopy(
        NifFormat.NiTriBasedGeomData().deepcopy(self.data))
    # fill in the triangles
    shape_data.set_triangles(
        self.data.get_triangles() if triangles is None else triangles)
    # attach the new data block and hand back the shape
    shape.data = shape_data
    return shape
6874
def get_interchangeable_tri_strips(self, strips=None):
    """Return a NiTriStrips block that is geometrically interchangeable
    with this geometry.

    :param strips: Optional strip list to store in the new shape;
        when omitted, the strips of the original shape are used.
    """
    # clone this shape as a NiTriStrips
    # (going via NiTriBasedGeom strips off subclass-specific data)
    strips_block = NifFormat.NiTriStrips().deepcopy(
        NifFormat.NiTriBasedGeom().deepcopy(self))
    # clone the geometry data without triangle information
    strips_data = NifFormat.NiTriStripsData().deepcopy(
        NifFormat.NiTriBasedGeomData().deepcopy(self.data))
    # fill in the strips
    strips_data.set_strips(
        self.data.get_strips() if strips is None else strips)
    # attach the new data block and hand back the shape
    strips_block.data = strips_data
    return strips_block
6895
6896 - class NiTriShapeData:
6897 """ 6898 Example usage: 6899 6900 >>> from pyffi.formats.nif import NifFormat 6901 >>> block = NifFormat.NiTriShapeData() 6902 >>> block.set_triangles([(0,1,2),(2,1,3),(2,3,4)]) 6903 >>> block.get_strips() 6904 [[0, 1, 2, 3, 4]] 6905 >>> block.get_triangles() 6906 [(0, 1, 2), (2, 1, 3), (2, 3, 4)] 6907 >>> block.set_strips([[1,0,1,2,3,4]]) 6908 >>> block.get_strips() # stripifier keeps geometry but nothing else 6909 [[0, 2, 1, 3], [2, 4, 3]] 6910 >>> block.get_triangles() 6911 [(0, 2, 1), (1, 2, 3), (2, 4, 3)] 6912 """
def get_triangles(self):
    """Return the stored triangles as a list of (v_1, v_2, v_3)
    vertex-index tuples."""
    result = []
    for tri in self.triangles:
        result.append((tri.v_1, tri.v_2, tri.v_3))
    return result
6915
def set_triangles(self, triangles, stitchstrips=False):
    """Set the geometry from a list of (v_1, v_2, v_3) triangle tuples.

    :param triangles: List of vertex-index triples.
    :param stitchstrips: Ignored; present only to keep the interface
        uniform between NiTriShapeData and NiTriStripsData.
    """
    # resize the triangle array to hold all triangles
    count = len(triangles)
    self.num_triangles = count
    self.num_triangle_points = 3 * count
    self.has_triangles = (count > 0)
    self.triangles.update_size()

    # copy the vertex indices into the resized array
    for dst_tri, (v_1, v_2, v_3) in zip(self.triangles, triangles):
        dst_tri.v_1 = v_1
        dst_tri.v_2 = v_2
        dst_tri.v_3 = v_3
6933
6934 - def get_strips(self):
6936
def set_strips(self, strips):
    """Set the geometry from a list of strips.

    NiTriShapeData stores triangles only, so the strips are first
    triangulated and then stored via set_triangles.
    """
    triangulated = pyffi.utils.tristrip.triangulate(strips)
    self.set_triangles(triangulated)
6939
class NiTriStripsData:
    """Geometry data block that stores triangle strips.

    Example usage:

    >>> from pyffi.formats.nif import NifFormat
    >>> block = NifFormat.NiTriStripsData()
    >>> block.set_triangles([(0,1,2),(2,1,3),(2,3,4)])
    >>> block.get_strips()
    [[0, 1, 2, 3, 4]]
    >>> block.get_triangles()
    [(0, 1, 2), (1, 3, 2), (2, 3, 4)]
    >>> block.set_strips([[1,0,1,2,3,4]])
    >>> block.get_strips()
    [[1, 0, 1, 2, 3, 4]]
    >>> block.get_triangles()
    [(0, 2, 1), (1, 2, 3), (2, 4, 3)]
    """
    def get_triangles(self):
        """Return the strip geometry as a list of triangle index tuples."""
        return pyffi.utils.tristrip.triangulate(self.points)

    def set_triangles(self, triangles, stitchstrips=False):
        """Stripify the given triangles and store the resulting strips.

        :param triangles: List of (v_1, v_2, v_3) vertex-index tuples.
        :param stitchstrips: Whether the stripifier may stitch strips
            together.
        """
        new_strips = pyffi.utils.vertex_cache.stripify(
            triangles, stitchstrips=stitchstrips)
        self.set_strips(new_strips)

    def get_strips(self):
        """Return a copy of the stored strips as plain lists of indices."""
        return [list(strip) for strip in self.points]

    def set_strips(self, strips):
        """Store the given strips, resizing all internal arrays.

        :param strips: List of strips; each strip is a sequence of
            vertex indices.
        """
        # size the strip-length array
        self.num_strips = len(strips)
        self.strip_lengths.update_size()
        # record each strip's length and count the triangles it encodes
        # (a strip of n points encodes n - 2 triangles)
        total_triangles = 0
        for i, strip in enumerate(strips):
            self.strip_lengths[i] = len(strip)
            total_triangles += len(strip) - 2
        self.num_triangles = total_triangles
        # size the points array and copy the vertex indices
        self.points.update_size()
        self.has_points = (len(strips) > 0)
        for i, strip in enumerate(strips):
            for j, vert_index in enumerate(strip):
                self.points[i][j] = vert_index
6983
class RagdollDescriptor:
    def update_a_b(self, transform):
        """Update B pivot and axes from A using the given transform.

        :param transform: The 4x4 transform mapping the A frame to the
            B frame; only its 3x3 rotation part is applied to the axes.
        """
        # pivot point
        # NOTE(review): the 7 * ... / 7.0 does not cancel out when the
        # transform has a translation part: scaling the pivot up before the
        # transform and back down afterwards applies the translation at 1/7
        # scale.  Presumably this accounts for the havok unit scale factor
        # of 7 — TODO confirm before restructuring this expression.
        pivot_b = ((7 * self.pivot_a.get_vector_3()) * transform) / 7.0
        self.pivot_b.x = pivot_b.x
        self.pivot_b.y = pivot_b.y
        self.pivot_b.z = pivot_b.z
        # axes (rotation only)
        transform = transform.get_matrix_33()
        plane_b = self.plane_a.get_vector_3() * transform
        twist_b = self.twist_a.get_vector_3() * transform
        self.plane_b.x = plane_b.x
        self.plane_b.y = plane_b.y
        self.plane_b.z = plane_b.z
        self.twist_b.x = twist_b.x
        self.twist_b.y = twist_b.y
        self.twist_b.z = twist_b.z
7002
class SkinData:
    def get_transform(self):
        """Return scale, rotation, and translation combined into a single
        4x4 transform matrix."""
        matrix = NifFormat.Matrix44()
        matrix.set_scale_rotation_translation(
            self.scale, self.rotation, self.translation)
        return matrix

    def set_transform(self, m):
        """Set scale, rotation, and translation from a 4x4 transform matrix.

        :param m: A 4x4 matrix providing get_scale_rotation_translation().
        """
        scale, rotation, translation = m.get_scale_rotation_translation()

        self.scale = scale

        # copy all nine entries of the 3x3 rotation matrix
        for entry in ("m_11", "m_12", "m_13",
                      "m_21", "m_22", "m_23",
                      "m_31", "m_32", "m_33"):
            setattr(self.rotation, entry, getattr(rotation, entry))

        self.translation.x = translation.x
        self.translation.y = translation.y
        self.translation.z = translation.z
7029
class StringPalette:
    def get_string(self, offset):
        """Return string at given offset.

        :param offset: Byte offset into the palette; must be 0 or the
            position right after a null terminator.
        :raise ValueError: If the offset is out of range, or does not
            point at the start of a string.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> print(pal.get_string(0).decode("ascii"))
        abc
        >>> print(pal.get_string(4).decode("ascii"))
        def
        >>> pal.get_string(5) # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...
        >>> pal.get_string(100) # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...
        """
        _b00 = pyffi.object_models.common._b00 # shortcut
        # check that offset isn't too large
        if offset >= len(self.palette):
            raise ValueError(
                "StringPalette: getting string at %i "
                "but palette is only %i long"
                % (offset, len(self.palette)))
        # check that a string starts at this offset
        # (i.e. the previous byte, if any, is a null terminator)
        if offset > 0 and self.palette[offset-1:offset] != _b00:
            raise ValueError(
                "StringPalette: no string starts at offset %i "
                "(palette is %s)" % (offset, self.palette))
        # return everything up to (excluding) the next null terminator
        return self.palette[offset:self.palette.find(_b00, offset)]

    def get_all_strings(self):
        """Return a list of all strings.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> for x in pal.get_all_strings():
        ...     print(x.decode("ascii"))
        abc
        def
        >>> # pal.palette.decode("ascii") needs lstrip magic for py3k
        >>> print(repr(pal.palette.decode("ascii")).lstrip("u"))
        'abc\\x00def\\x00'
        """
        _b00 = pyffi.object_models.common._b00 # shortcut
        # drop the trailing null terminator, then split on the others
        return self.palette[:-1].split(_b00)

    def add_string(self, text):
        """Adds string to palette (will recycle existing strings if possible) and
        return offset to the string in the palette.

        :param text: The string to add (str or bytes).
        :return: Offset of the string in the palette, or -1 for the
            empty string.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> pal.add_string("")
        -1
        >>> print(pal.get_string(4).decode("ascii"))
        def
        """
        # empty text
        if not text:
            return -1
        _b00 = pyffi.object_models.common._b00 # shortcut
        # convert text to bytes if necessary
        text = pyffi.object_models.common._as_bytes(text)
        # check if string is already in the palette
        # ... at the start
        if text + _b00 == self.palette[:len(text) + 1]:
            return 0
        # ... or elsewhere (preceded and followed by a null terminator)
        offset = self.palette.find(_b00 + text + _b00)
        if offset != -1:
            return offset + 1
        # no match: append the string
        # (the original code re-checked `if offset == -1` here, which is
        # always true after the early return above — dead guard removed)
        offset = len(self.palette)
        self.palette = self.palette + text + _b00
        self.length += len(text) + 1
        # return the offset
        return offset

    def clear(self):
        """Clear all strings in the palette.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> # pal.palette.decode("ascii") needs lstrip magic for py3k
        >>> print(repr(pal.palette.decode("ascii")).lstrip("u"))
        'abc\\x00def\\x00'
        >>> pal.clear()
        >>> # pal.palette.decode("ascii") needs lstrip magic for py3k
        >>> print(repr(pal.palette.decode("ascii")).lstrip("u"))
        ''
        """
        self.palette = pyffi.object_models.common._b # empty bytes object
        self.length = 0
7146
class TexCoord:
    """A (u, v) texture coordinate pair with basic vector arithmetic."""

    def as_list(self):
        """Return the coordinates as a [u, v] list."""
        return [self.u, self.v]

    def normalize(self):
        """Scale this coordinate pair to unit length in place.

        :raise ZeroDivisionError: If the vector length is (nearly) zero.
        """
        length = (self.u*self.u + self.v*self.v) ** 0.5
        if length < NifFormat.EPSILON:
            raise ZeroDivisionError('cannot normalize vector %s'%self)
        self.u /= length
        self.v /= length

    def __str__(self):
        return "[ %6.3f %6.3f ]"%(self.u, self.v)

    def __mul__(self, x):
        # scalar: componentwise scale; TexCoord: dot product
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = self.u * x
            result.v = self.v * x
            return result
        if isinstance(x, NifFormat.TexCoord):
            return self.u * x.u + self.v * x.v
        raise TypeError("do not know how to multiply TexCoord with %s"%x.__class__)

    def __rmul__(self, x):
        if not isinstance(x, (float, int, long)):
            raise TypeError("do not know how to multiply %s and TexCoord"%x.__class__)
        result = NifFormat.TexCoord()
        result.u = x * self.u
        result.v = x * self.v
        return result

    def __add__(self, x):
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = self.u + x
            result.v = self.v + x
            return result
        if isinstance(x, NifFormat.TexCoord):
            result = NifFormat.TexCoord()
            result.u = self.u + x.u
            result.v = self.v + x.v
            return result
        raise TypeError("do not know how to add TexCoord and %s"%x.__class__)

    def __radd__(self, x):
        if not isinstance(x, (float, int, long)):
            raise TypeError("do not know how to add %s and TexCoord"%x.__class__)
        result = NifFormat.TexCoord()
        result.u = x + self.u
        result.v = x + self.v
        return result

    def __sub__(self, x):
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = self.u - x
            result.v = self.v - x
            return result
        if isinstance(x, NifFormat.TexCoord):
            result = NifFormat.TexCoord()
            result.u = self.u - x.u
            result.v = self.v - x.v
            return result
        raise TypeError("do not know how to substract TexCoord and %s"%x.__class__)

    def __rsub__(self, x):
        if not isinstance(x, (float, int, long)):
            raise TypeError("do not know how to substract %s and TexCoord"%x.__class__)
        result = NifFormat.TexCoord()
        result.u = x - self.u
        result.v = x - self.v
        return result

    def __neg__(self):
        result = NifFormat.TexCoord()
        result.u = -self.u
        result.v = -self.v
        return result
7232
class NiPSysData:
    def _get_filtered_attribute_list(self, data=None):
        """Yield the block's attributes, hiding the per-vertex arrays on
        recent versions (acts as if num_vertices were forced to 0)."""
        for attr in StructBase._get_filtered_attribute_list(self, data):
            # skip vertex-array attributes for version >= 20.2.0.7 with
            # user version 11, where they are not stored in the file
            if (data
                    and attr.name in ("vertices",
                                      "normals", "tangents", "bitangents",
                                      "vertex_colors", "uv_sets")
                    and data.version >= 0x14020007
                    and data.user_version == 11):
                continue
            yield attr
# run the module's doctests when executed as a script
if __name__=='__main__':
    import doctest
    doctest.testmod()