@@ -9,15 +9,29 @@
 import tempfile
 import os
 import sys
-import stat
 import subprocess
 import glob
 from cStringIO import StringIO
 
-from typ import *
+from stat import (
+    S_ISLNK,
+    S_ISDIR,
+    S_IFMT,
+    S_IFDIR,
+    S_IFLNK,
+    S_IFREG
+)
+
+from typ import (
+    BaseIndexEntry,
+    IndexEntry,
+    CE_NAMEMASK,
+    CE_STAGESHIFT
+)
+
 from util import (
     TemporaryFileSwap,
-    clear_cache,
+    post_clear_cache,
     default_index,
     pack,
     unpack
@@ -75,7 +89,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
     """
     __slots__ = ("repo", "version", "entries", "_extension_data", "_file_path")
     _VERSION = 2                    # latest version we support
-    S_IFGITLINK = 0160000
+    S_IFGITLINK = 0160000           # a submodule
 
     def __init__(self, repo, file_path=None):
         """
@@ -141,12 +155,12 @@ def _read_entry(cls, stream):
         mtime = unpack(">8s", stream.read(8))[0]
         (dev, ino, mode, uid, gid, size, sha, flags) = \
             unpack(">LLLLLL20sH", stream.read(20 + 4 * 6 + 2))
-        path_size = flags & 0x0fff
+        path_size = flags & CE_NAMEMASK
         path = stream.read(path_size)
 
         real_size = ((stream.tell() - beginoffset + 8) & ~7)
         data = stream.read((beginoffset + real_size) - stream.tell())
-        return IndexEntry((mode, binascii.hexlify(sha), flags >> 12, path, ctime, mtime, dev, ino, uid, gid, size))
+        return IndexEntry((mode, binascii.hexlify(sha), flags, path, ctime, mtime, dev, ino, uid, gid, size))
 
     @classmethod
     def _read_header(cls, stream):
@@ -198,7 +212,7 @@ def _serialize(self, stream, ignore_tree_extension_data=False):
 
         # body
         entries_sorted = self.entries.values()
-        entries_sorted.sort(key=lambda e: (e[3], e[2]))         # use path/stage as sort key
+        entries_sorted.sort(key=lambda e: (e[3], e.stage))      # use path/stage as sort key
         for entry in entries_sorted:
             self._write_cache_entry(stream, entry)
         # END for each entry
@@ -226,17 +240,18 @@ def _serialize(self, stream, ignore_tree_extension_data=False):
     def _write_cache_entry(cls, stream, entry):
         """ Write an IndexEntry to a stream """
         beginoffset = stream.tell()
-        stream.write(entry[4])          # ctime
-        stream.write(entry[5])          # mtime
+        write = stream.write
+        write(entry[4])                 # ctime
+        write(entry[5])                 # mtime
         path = entry[3]
-        plen = len(path) & 0x0fff       # path length
+        plen = len(path) & CE_NAMEMASK  # path length
         assert plen == len(path), "Path %s too long to fit into index" % entry[3]
-        flags = plen | (entry[2] << 12) # stage and path length are 2 byte flags
-        stream.write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
+        flags = plen | entry[2]
+        write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
                     entry[8], entry[9], entry[10], binascii.unhexlify(entry[1]), flags))
-        stream.write(path)
+        write(path)
         real_size = ((stream.tell() - beginoffset + 8) & ~7)
-        stream.write("\0" * ((beginoffset + real_size) - stream.tell()))
+        write("\0" * ((beginoffset + real_size) - stream.tell()))
 
     def write(self, file_path=None, ignore_tree_extension_data=False):
         """
@@ -272,7 +287,7 @@ def write(self, file_path = None, ignore_tree_extension_data=False):
         if file_path is not None:
             self._file_path = file_path
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def merge_tree(self, rhs, base=None):
         """Merge the given rhs treeish into the current index, possibly taking
@@ -383,24 +398,14 @@ def from_tree(cls, repo, *treeish, **kwargs):
         return index
 
     @classmethod
-    def _index_mode_to_tree_index_mode(cls, index_mode):
-        """
-        Cleanup a index_mode value.
-        This will return a index_mode that can be stored in a tree object.
-
-        ``index_mode``
-            Index_mode to clean up.
-        """
-        if stat.S_ISLNK(index_mode):
-            return stat.S_IFLNK
-        elif stat.S_ISDIR(index_mode):
-            return stat.S_IFDIR
-        elif stat.S_IFMT(index_mode) == cls.S_IFGITLINK:
+    def _stat_mode_to_index_mode(cls, mode):
+        """Convert the given mode from a stat call to the corresponding index mode
+        and return it"""
+        if S_ISLNK(mode):               # symlinks
+            return S_IFLNK
+        if S_ISDIR(mode) or S_IFMT(mode) == cls.S_IFGITLINK:    # submodules
             return cls.S_IFGITLINK
-        ret = stat.S_IFREG | 0644
-        ret |= (index_mode & 0111)
-        return ret
-
+        return S_IFREG | 0644 | (mode & 0100)   # blobs with or without executable bit
 
     # UTILITIES
     def _iter_expand_paths(self, paths):
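
To make the behaviour of the renamed helper concrete, here is a small sketch of what it returns for the common cases; it is not part of the commit, assumes the class is importable as git.IndexFile, and uses the same stat constants the module now imports:

from stat import S_IFLNK, S_IFDIR, S_IFREG
from git import IndexFile      # assumed top-level export, not shown in this diff

assert IndexFile._stat_mode_to_index_mode(S_IFLNK | 0777) == S_IFLNK                # symlink
assert IndexFile._stat_mode_to_index_mode(S_IFDIR | 0755) == IndexFile.S_IFGITLINK  # directory -> submodule
assert IndexFile._stat_mode_to_index_mode(S_IFREG | 0664) == S_IFREG | 0644         # plain blob
# executable files additionally keep the owner-execute bit (mode & 0100)
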
@@ -479,7 +484,9 @@ def iter_blobs(self, predicate = lambda t: True):
         only if they match a given list of paths.
         """
         for entry in self.entries.itervalues():
-            mode = self._index_mode_to_tree_index_mode(entry.mode)
+            # TODO: is it necessary to convert the mode? We did that when adding
+            # it to the index, right?
+            mode = self._stat_mode_to_index_mode(entry.mode)
             blob = Blob(self.repo, entry.sha, mode, entry.path)
             blob.size = entry.size
             output = (entry.stage, blob)
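
A short usage sketch for the method this hunk touches, assuming the library's usual Repo entry point (not shown in this diff); the predicate receives the same (stage, blob) pairs the method yields:

from git import Repo           # assumed entry point

index = Repo(".").index
# keep only stage-0 entries, i.e. everything not part of an unresolved merge
for stage, blob in index.iter_blobs(lambda stage_blob: stage_blob[0] == 0):
    print("%o %s" % (blob.mode, blob.path))
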
@@ -636,7 +643,6 @@ def _preprocess_add_items(self, items):
         # END for each item
         return (paths, entries)
 
-    @clear_cache
     @default_index
     def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=None):
         """Add files from the working tree, specific blobs or BaseIndexEntries
@@ -739,7 +745,7 @@ def store_path(filepath):
             """Store file at filepath in the database and return the base index entry"""
             st = os.lstat(filepath)     # handles non-symlinks as well
             stream = None
-            if stat.S_ISLNK(st.st_mode):
+            if S_ISLNK(st.st_mode):
                 stream = StringIO(os.readlink(filepath))
             else:
                 stream = open(filepath, 'rb')
@@ -759,13 +765,6 @@ def store_path(filepath):
             for filepath in self._iter_expand_paths(paths):
                 entries_added.append(store_path(filepath))
             # END for each filepath
-
-            # add the new entries to this instance, and write it
-            for entry in entries_added:
-                self.entries[(entry.path, 0)] = IndexEntry.from_base(entry)
-
-            # finally write the changed index
-            self.write()
         # END path handling
 
 
@@ -823,6 +822,14 @@ def store_path(filepath):
             self._flush_stdin_and_wait(proc, ignore_stdout=True)
             entries_added.extend(entries)
         # END if there are base entries
+
+        # FINALIZE
+        # add the new entries to this instance, and write it
+        for entry in entries_added:
+            self.entries[(entry.path, 0)] = IndexEntry.from_base(entry)
+
+        # finally write the changed index
+        self.write()
 
         return entries_added
 
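
With the finalize step moved here, entries collected from the plain-path branch and from the subprocess-fed branch are written back to the index file in one place, so a caller still issues a single call; a usage sketch under the same Repo assumption as above:

from git import Repo           # assumed entry point

index = Repo(".").index
# paths are expanded, stored as blobs via store_path or git itself, and the
# index file is written exactly once, in the FINALIZE block added above
entries = index.add(["README"])
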
@@ -840,7 +847,7 @@ def _items_to_rela_paths(self, items):
         # END for each item
         return paths
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def remove(self, items, working_tree=False, **kwargs):
         """
@@ -893,7 +900,7 @@ def remove(self, items, working_tree=False, **kwargs):
             # rm 'path'
         return [p[4:-1] for p in removed_paths]
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def move(self, items, skip_errors=False, **kwargs):
         """
@@ -1127,7 +1134,7 @@ def handle_stderr(proc, iter_checked_out_files):
         # END paths handling
         assert "Should not reach this point"
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwargs):
         """