Source Code for Package pyffi.utils

  1  """This module bundles various general purpose utilities: 
  2  - hexdumping 
  3  - parsing all files in a directory tree 
  4  - 3D related tasks (see TriStrip.py, MathUtils.py, QuickHull.py, and Inertia.py) 
  5  """ 
# ***** BEGIN LICENSE BLOCK *****
#
# Copyright (c) 2007-2011, Python File Format Interface
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#
#    * Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials provided
#      with the distribution.
#
#    * Neither the name of the Python File Format Interface
#      project nor the names of its contributors may be used to endorse
#      or promote products derived from this software without specific
#      prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# ***** END LICENSE BLOCK *****

import os

def walk(top, topdown=True, onerror=None, re_filename=None):
    """A variant of os.walk() which also works if top is a file instead of a
    directory, filters files by name, and returns the full path. File names are
    returned in alphabetical order.

    :param top: The top directory or file.
    :type top: str
    :param topdown: Whether to list directories first or not.
    :type topdown: bool
    :param onerror: Which function to call when an error occurs.
    :type onerror: function
    :param re_filename: Regular expression to match file names.
    :type re_filename: compiled regular expression (see re module)
    """
    if os.path.isfile(top):
        dirpath, filename = os.path.split(top)
        if re_filename:
            if re_filename.match(filename):
                yield top
        else:
            yield top
    else:
        for dirpath, dirnames, filenames in os.walk(top):
            filenames = sorted(filenames)
            for filename in filenames:
                if re_filename:
                    if re_filename.match(filename):
                        yield os.path.join(dirpath, filename)
                else:
                    yield os.path.join(dirpath, filename)

#table = "."*32
#for c in [chr(i) for i in xrange(32,128)]:
#    table += c
#table += "."*128
chartable = '................................ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.................................................................................................................................'.encode("ascii")
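
The long string above is a 256-entry translation table: printable ASCII (0x20 through 0x7E) maps to itself and every other byte maps to '.', so that bytes.translate() can render the text column of a hexdump. A rough, Python 3 only sketch of how such a table could be built (an illustration, not the module's code):

    # hypothetical equivalent of chartable, for illustration (Python 3)
    sketch_table = bytes(i if 0x20 <= i < 0x7f else ord('.') for i in range(256))
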
def hex_dump(f, num_lines=8):
    """A function for hexdumping.

    >>> from tempfile import TemporaryFile
    >>> f = TemporaryFile()
    >>> if f.write('abcdefg\\x0a'.encode("ascii")): pass
    >>> if f.seek(2): pass # ignore result for py3k
    >>> hex_dump(f, 2)
               00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F
    -----------------------------------------------------------
    0x00000000  61 62>63 64 65 66 67 0A                         |abcdefg.        |
    0x00000010                                                  |                |
    <BLANKLINE>
    """

    dumpstr = ""

    pos = f.tell()
    if pos > num_lines*8:
        f.seek((pos-num_lines*8) & 0xfffffff0)
    else:
        f.seek(0)
    dumppos = f.tell()
    dumpstr += "           "
    for ofs in xrange(16):
        dumpstr += "%02X " % ofs
    dumpstr += "\n-----------------------------------------------------------\n"
    for i in xrange(num_lines):
        dumpstr += "0x%08X " % dumppos
        data = f.read(16)
        for j, c in enumerate(data):
            # py3k: data is bytes object, so c is int already
            # py2x: data is string, so convert c to int with ord
            if isinstance(c, int):
                cc = c
            else:
                cc = ord(c)
            if dumppos + j != pos:
                dumpstr += " %02X" % cc
            else:
                dumpstr += ">%02X" % cc
        for j in xrange(len(data), 16):
            dumpstr += "   "
            data += " ".encode("ascii")
        dumpstr += " |" + data.translate(chartable).decode("ascii") + "|\n"
        dumppos += 16
    print(dumpstr)

def unique_map(hash_generator):
    """Return a map and inverse map to identify unique values based
    on hash, which is useful for removing duplicate data. If the hash
    generator yields None then the value is mapped to None (useful for
    discarding data).

    >>> unique_map([])
    ([], [])
    >>> unique_map([3,2,6,None,1])
    ([0, 1, 2, None, 3], [0, 1, 2, 4])
    >>> unique_map([3,1,6,1])
    ([0, 1, 2, 1], [0, 1, 2])
    >>> unique_map([3,1,6,1,2,2,9,3,2])
    ([0, 1, 2, 1, 3, 3, 4, 0, 3], [0, 1, 2, 4, 6])
    """
    hash_map = []  # maps old index to new index
    hash_map_inverse = []  # inverse: map new index to old index
    hash_index_map = {None: None}  # maps hash to new index (default for None)
    new_index = 0
    for old_index, hash_ in enumerate(hash_generator):
        try:
            hash_index = hash_index_map[hash_]
        except KeyError:
            # hash is new
            hash_index_map[hash_] = new_index
            hash_map.append(new_index)
            hash_map_inverse.append(old_index)
            new_index += 1
        else:
            # hash already exists
            hash_map.append(hash_index)
    return hash_map, hash_map_inverse

if __name__ == '__main__':
    import doctest
    doctest.testmod()