doom3-gpl
Doom 3 GPL source release
id_utils.py
# a collection of utility functions to manipulate pak files

import os, zipfile, md5, pdb

# list the .pk4 files in a path, sorted in reverse alphabetical order
# like doom does for searching
def list_paks( path ):
    # build a filtered list instead of removing entries while iterating,
    # which would skip files
    files = [ i for i in os.listdir( path ) if i[-4:] == '.pk4' ]
    files.sort()
    files.reverse()
    return files

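# Illustrative sketch, not part of the original file: shows the ordering
# list_paks() produces. The 'base' directory and pak names are hypothetical.
def _example_list_paks():
    # with pak000.pk4 .. pak002.pk4 in 'base', reverse alphabetical order
    # puts the later ( higher priority ) paks first:
    # [ 'pak002.pk4', 'pak001.pk4', 'pak000.pk4' ]
    for pak in list_paks( 'base' ):
        print pak
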
# list the files contained in a single pak
def list_files_in_pak( pak ):
    files = []
    zippy = zipfile.ZipFile( pak )
    files += zippy.namelist()
    files.sort()
    return files

# merge the contents of every pak without regard for search order
# the result is sorted and has duplicates removed
def list_files_in_paks( path ):
    files = []
    zippies = list_paks( path )
    for fname in zippies:
        print fname
        zippy = zipfile.ZipFile( os.path.join( path, fname ) )
        files += zippy.namelist()
    # sort and remove dupes
    dico = {}
    for f in files:
        dico[ f ] = 1
    files = dico.keys()
    files.sort()
    return files

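# Illustrative sketch, not part of the original file: print the combined,
# deduplicated content listing of every pak under a hypothetical 'base' path.
def _example_list_files_in_paks():
    for name in list_files_in_paks( 'base' ):
        print name
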
# build a dictionary of name -> ( pak name, md5 ) from a path of pk4s
# duplicates resolve to the pak doom would actually load from
# ( the first one in the reverse alphabetical search order )
def md5_in_paks( path ):
    ret = {}
    zippies = list_paks( path )
    for fname in zippies:
        print fname
        zippy = zipfile.ZipFile( os.path.join( path, fname ) )
        for file in zippy.namelist():
            if ( ret.has_key( file ) ):
                continue
            data = zippy.read( file )
            m = md5.new()
            m.update( data )
            ret[ file ] = ( fname, m.hexdigest() )
    return ret

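# Illustrative sketch, not part of the original file: look up which pak a
# given asset comes from and what its checksum is. The path and asset name
# are hypothetical.
def _example_md5_in_paks():
    table = md5_in_paks( 'base' )
    asset = 'textures/base_wall/lfwall13f3.tga'
    ( pak_name, digest ) = table[ asset ]
    print '%s comes from %s ( md5 %s )' % ( asset, pak_name, digest )
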
# find which files need to be updated in a set of paks from an expanded list
# returns ( updated, not_found, {} )
# ignores directories
# by default, no case match is done
# if case match is set, return ( updated, not_found, { zip case -> FS case } )
# updated will contain the zip case name
def list_updated_files( pak_path, base_path, case_match = False ):
    not_found = []
    updated = []
    case_table = {}
    pak_md5 = md5_in_paks( pak_path )
    for file in pak_md5.keys():
        if ( file[-1] == '/' ):
            continue
        path = os.path.join( base_path, file )
        if ( case_match ):
            ret = ifind( base_path, file )
            if ( not ret[ 0 ] ):
                not_found.append( file )
                continue
            else:
                case_table[ path ] = ret[ 1 ]
                path = os.path.join( base_path, ret[ 1 ] )
        try:
            # read in binary mode so the md5 matches the zip content on win32
            f = open( path, 'rb' )
            data = f.read()
            f.close()
        except IOError:
            if ( case_match ):
                raise Exception( 'internal error: ifind success but later read failed' )
            not_found.append( file )
        else:
            m = md5.new()
            m.update( data )
            if ( m.hexdigest() != pak_md5[ file ][ 1 ] ):
                print file
                updated.append( file )
    return ( updated, not_found, case_table )

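# Illustrative sketch, not part of the original file: compare expanded media
# against the shipping paks and report the differences. The two paths are
# hypothetical.
def _example_list_updated_files():
    ( updated, not_found, case_table ) = list_updated_files( 'base', '../media/base', case_match = True )
    print '%d files differ from the pak content' % len( updated )
    print '%d files could not be found on disk' % len( not_found )
    # with case_match set, case_table maps the zip-cased path under base_path
    # to the FS-cased path relative to base_path
    for ( zip_case, fs_case ) in case_table.items():
        print '%s -> %s' % ( zip_case, fs_case )
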
# find which files are missing in the expanded path, and extract the directories
# returns ( files, dirs, missing )
def status_files_for_path( path, infiles ):
    files = []
    dirs = []
    missing = []
    for i in infiles:
        test_path = os.path.join( path, i )
        if ( os.path.isfile( test_path ) ):
            files.append( i )
        elif ( os.path.isdir( test_path ) ):
            dirs.append( i )
        else:
            missing.append( i )
    return ( files, dirs, missing )

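# Illustrative sketch, not part of the original file: classify the content of
# one pak against an expanded media directory. Paths are hypothetical.
def _example_status_files_for_path():
    names = list_files_in_pak( 'base/pak000.pk4' )
    ( files, dirs, missing ) = status_files_for_path( '../media/base', names )
    print '%d files, %d dirs, %d missing' % ( len( files ), len( dirs ), len( missing ) )
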
# build a pak from a base path and a list of files
def build_pak( pak, path, files ):
    zippy = zipfile.ZipFile( pak, 'w', zipfile.ZIP_DEFLATED )
    for i in files:
        source_path = os.path.join( path, i )
        print source_path
        zippy.write( source_path, i )
    zippy.close()

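# Illustrative sketch, not part of the original file: rebuild a patch pak from
# whatever changed on disk, combining list_updated_files and build_pak. The
# pak name and paths are hypothetical.
def _example_build_pak():
    ( updated, not_found, case_table ) = list_updated_files( 'base', '../media/base' )
    build_pak( 'pak_patch.pk4', '../media/base', updated )
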
# process the list of files after a run to update media
# dds/ -> verify all the .dds are present in the zip ( case insensitive )
# .wav -> verify that all .wav have a .ogg version in the zip ( case insensitive )
# .tga not in dds/ -> try to find a .dds for them
# works from a list of files, and a path to the base pak files
# files: text file with one file name per line
# pak_path: the path to the pak files to compare against
# returns: ( [ missing ], [ bad ] )
# bad are files the function didn't know what to do with ( bug )
# missing are the lowercased names of all the files that were not matched in the build
# the dds/ ones are all forced to the .dds extension
# missing .wav files are returned in the missing list both as .wav and .ogg
# ( that's handy when you need to fetch them next )
def check_files_against_build( files, pak_path ):
    pak_list = list_files_in_paks( pak_path )
    # make it lowercase
    tmp = []
    for i in pak_list:
        tmp.append( i.lower() )
    pak_list = tmp
    # read the files and make them lowercase
    f = open( files )
    check_files = f.readlines()
    f.close()
    tmp = []
    for i in check_files:
        s = i.lower()
        s = s.replace( '\n', '' )
        s = s.replace( '\r', '' )
        tmp.append( s )
    check_files = tmp
    # start processing
    bad = []
    missing = []
    for i in check_files:
        if ( i[ :4 ] == 'dds/' ):
            if ( i[ -4: ] == '.tga' ):
                i = i[ :-4 ] + '.dds'
            elif ( i[ -4: ] != '.dds' ):
                print 'File not understood: ' + i
                bad.append( i )
                continue
            # at this point the name is forced to .dds - check it's in the build
            try:
                pak_list.index( i )
            except ValueError:
                print 'Not found: ' + i
                missing.append( i )
        elif ( i[ -4: ] == '.wav' ):
            # a .wav must have an .ogg version in the build
            i = i[ :-4 ] + '.ogg'
            try:
                pak_list.index( i )
            except ValueError:
                print 'Not found: ' + i
                missing.append( i )
                missing.append( i[ :-4 ] + '.wav' )
        elif ( i[ -4: ] == '.tga' ):
            # tga, not from dds/
            try:
                pak_list.index( i )
            except ValueError:
                print 'Not found: ' + i
                missing.append( i )
                i = 'dds/' + i[ :-4 ] + '.dds'
                print 'Add dds : ' + i
                missing.append( i )
        else:
            try:
                pak_list.index( i )
            except ValueError:
                print 'Not found: ' + i
                missing.append( i )
    return ( missing, bad )

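# Illustrative sketch, not part of the original file: run the media check from
# a hypothetical text file listing referenced assets one per line, then write
# out what still needs to be fetched.
def _example_check_files_against_build():
    ( missing, bad ) = check_files_against_build( 'referenced_media.txt', 'base' )
    f = open( 'missing.txt', 'w' )
    for i in missing:
        f.write( i + '\n' )
    f.close()
    if ( len( bad ) ):
        print 'files the check did not understand: %s' % repr( bad )
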
# match a path to a file in a case insensitive way
# returns ( True/False, 'walked up to' ) - the second element is relative to base
def ifind( base, path ):
    refpath = path
    path = os.path.normpath( path )
    path = os.path.normcase( path )
    # early out if the path already exists with this exact case
    if ( os.path.exists( os.path.join( base, path ) ) ):
        return ( True, path )
    # split the path into its components so we can walk them level by level
    head = path
    components = []
    while ( len( head ) ):
        ( head, chunk ) = os.path.split( head )
        components.append( chunk )
        #print 'head: %s - components: %s' % ( head, repr( components ) )
    components.reverse()
    level = 0
    for root, dirs, files in os.walk( base, topdown = True ):
        if ( level < len( components ) - 1 ):
            # prune the walk down to the single directory matching the next component
            #print 'filter dirs: %s' % repr( dirs )
            dirs_del = []
            for i in dirs:
                if ( not i.lower() == components[ level ].lower() ):
                    dirs_del.append( i )
            for i in dirs_del:
                dirs.remove( i )
            level += 1
            # we assume there is never going to be 2 dirs with only case difference
            if ( len( dirs ) != 1 ):
                #print '%s: ifind failed dirs matching at %s - dirs: %s' % ( refpath, root, repr( dirs ) )
                return ( False, root[ len( base ) + 1: ] )
        else:
            # must find the file here
            for i in files:
                if ( i.lower() == components[-1].lower() ):
                    return ( True, os.path.join( root, i )[ len( base ) + 1: ] )
            return ( False, root[ len( base ) + 1: ] )

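# Illustrative sketch, not part of the original file: resolve the on-disk case
# of a path that is stored lowercase in a pak. Paths are hypothetical.
def _example_ifind():
    ( found, walked ) = ifind( '../media/base', 'textures/base_wall/lfwall13f3.tga' )
    if ( found ):
        # on success the second element is the correctly cased path relative to base
        print os.path.join( '../media/base', walked )
    else:
        print 'gave up at: ' + walked
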
# do a case insensitive FS search on a list of files
# returns [ cased files, not found ( unmodified ) ]
def ifind_list( base, files ):
    cased = []
    notfound = []
    for i in files:
        ret = ifind( base, i )
        if ( ret[ 0 ] ):
            cased.append( ret[ 1 ] )
        else:
            notfound.append( i )
    return [ cased, notfound ]