This article collects typical usage examples of the Python function mercurial.match.always. If you are unsure how always is used, what it is for, or want to see it in real code, the curated examples here may help.
Below are 20 code examples of the always function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
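Before diving into the examples, here is a minimal sketch of the call pattern they all share. It assumes the older two-argument API (match.always(root, cwd)) that these excerpts were written against; in recent Mercurial releases match.always() takes no arguments, so adjust accordingly. The repo object and the sample path are placeholders.

from mercurial import match as matchmod

def match_everything(repo):
    # Build a matcher that accepts every path under the repository root.
    m = matchmod.always(repo.root, repo.getcwd())
    assert m('any/file.txt')   # the matcher is callable and returns True for every path
    assert m.always()          # lets callers detect the "match all" fast path
    return m

Every example below is a variation on this: build an "always" matcher when no patterns were given, or an explicit match.match(...)/match.exact(...) matcher when they were.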
Example 1: scmutiladdremove
def scmutiladdremove(orig, repo, pats=[], opts={}, dry_run=None,
                     similarity=None):
    if not lfutil.islfilesrepo(repo):
        return orig(repo, pats, opts, dry_run, similarity)
    # Get the list of missing largefiles so we can remove them
    lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
    s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
                          False, False)
    (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
    # Call into the normal remove code, but the removing of the standin, we
    # want to have handled by original addremove. Monkey patching here makes
    # sure we don't remove the standin in the largefiles code, preventing a
    # very confused state later.
    if missing:
        m = [repo.wjoin(f) for f in missing]
        repo._isaddremove = True
        removelargefiles(repo.ui, repo, *m, **opts)
        repo._isaddremove = False
    # Call into the normal add code, and any files that *should* be added as
    # largefiles will be
    addlargefiles(repo.ui, repo, *pats, **opts)
    # Now that we've handled largefiles, hand off to the original addremove
    # function to take care of the rest. Make sure it doesn't do anything with
    # largefiles by installing a matcher that will ignore them.
    installnormalfilesmatchfn(repo[None].manifest())
    result = orig(repo, pats, opts, dry_run, similarity)
    restorematchfn()
    return result
Developer: jordigh, Project: mercurial-crew, Lines: 29, Source: overrides.py
Example 2: overrideupdate
def overrideupdate(orig, ui, repo, *pats, **opts):
    lfdirstate = lfutil.openlfdirstate(ui, repo)
    s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
                          False, False)
    (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
    # Need to lock between the standins getting updated and their
    # largefiles getting updated
    wlock = repo.wlock()
    try:
        if opts['check']:
            mod = len(modified) > 0
            for lfile in unsure:
                standin = lfutil.standin(lfile)
                if repo['.'][standin].data().strip() != \
                        lfutil.hashfile(repo.wjoin(lfile)):
                    mod = True
                else:
                    lfdirstate.normal(lfile)
            lfdirstate.write()
            if mod:
                raise util.Abort(_('uncommitted changes'))
        # XXX handle removed differently
        if not opts['clean']:
            for lfile in unsure + modified + added:
                lfutil.updatestandin(repo, lfutil.standin(lfile))
    finally:
        wlock.release()
    return orig(ui, repo, *pats, **opts)
Developer: jordigh, Project: mercurial-crew, Lines: 29, Source: overrides.py
Example 3: getchangegroup
def getchangegroup(orig, repo, source, heads=None, common=None, bundlecaps=None):
    if not requirement in repo.requirements:
        return orig(repo, source, heads=heads, common=common,
                    bundlecaps=bundlecaps)

    original = repo.shallowmatch
    try:
        # if serving, only send files the client has patterns for
        if source == 'serve':
            includepattern = None
            excludepattern = None
            for cap in (bundlecaps or []):
                if cap.startswith("includepattern="):
                    raw = cap[len("includepattern="):]
                    if raw:
                        includepattern = raw.split('\0')
                elif cap.startswith("excludepattern="):
                    raw = cap[len("excludepattern="):]
                    if raw:
                        excludepattern = raw.split('\0')
            if includepattern or excludepattern:
                repo.shallowmatch = match.match(repo.root, '', None,
                    includepattern, excludepattern)
            else:
                repo.shallowmatch = match.always(repo.root, '')
        return orig(repo, source, heads, common, bundlecaps)
    finally:
        repo.shallowmatch = original
Developer: pycontribs, Project: remotefilelog, Lines: 28, Source: shallowbundle.py
Example 4: makechangegroup
def makechangegroup(orig, repo, outgoing, version, source, *args, **kwargs):
    if not requirement in repo.requirements:
        return orig(repo, outgoing, version, source, *args, **kwargs)

    original = repo.shallowmatch
    try:
        # if serving, only send files the client has patterns for
        if source == 'serve':
            bundlecaps = kwargs.get('bundlecaps')
            includepattern = None
            excludepattern = None
            for cap in (bundlecaps or []):
                if cap.startswith("includepattern="):
                    raw = cap[len("includepattern="):]
                    if raw:
                        includepattern = raw.split('\0')
                elif cap.startswith("excludepattern="):
                    raw = cap[len("excludepattern="):]
                    if raw:
                        excludepattern = raw.split('\0')
            if includepattern or excludepattern:
                repo.shallowmatch = match.match(repo.root, '', None,
                    includepattern, excludepattern)
            else:
                repo.shallowmatch = match.always(repo.root, '')
        return orig(repo, outgoing, version, source, *args, **kwargs)
    finally:
        repo.shallowmatch = original
Developer: davidshepherd7, Project: dotfiles, Lines: 28, Source: shallowbundle.py
Example 5: diffs
def diffs(repo, tmpl, ctx, basectx, files, parity, style):
    def countgen():
        start = 1
        while True:
            yield start
            start += 1

    blockcount = countgen()
    def prettyprintlines(diff, blockno):
        for lineno, l in enumerate(diff.splitlines(True)):
            difflineno = "%d.%d" % (blockno, lineno + 1)
            if l.startswith('+'):
                ltype = "difflineplus"
            elif l.startswith('-'):
                ltype = "difflineminus"
            elif l.startswith('@'):
                ltype = "difflineat"
            else:
                ltype = "diffline"
            yield tmpl(ltype,
                       line=l,
                       lineno=lineno + 1,
                       lineid="l%s" % difflineno,
                       linenumber="% 8s" % difflineno)

    if files:
        m = match.exact(repo.root, repo.getcwd(), files)
    else:
        m = match.always(repo.root, repo.getcwd())

    diffopts = patch.diffopts(repo.ui, untrusted=True)
    if basectx is None:
        parents = ctx.parents()
        if parents:
            node1 = parents[0].node()
        else:
            node1 = nullid
    else:
        node1 = basectx.node()
    node2 = ctx.node()

    block = []
    for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
        if chunk.startswith('diff') and block:
            blockno = blockcount.next()
            yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
                       lines=prettyprintlines(''.join(block), blockno))
            block = []
        if chunk.startswith('diff') and style != 'raw':
            chunk = ''.join(chunk.splitlines(True)[1:])
        block.append(chunk)
    blockno = blockcount.next()
    yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
               lines=prettyprintlines(''.join(block), blockno))
Developer: html-shell, Project: mozilla-build, Lines: 55, Source: webutil.py
Example 6: lfdirstate_status
def lfdirstate_status(lfdirstate, repo, rev):
    match = match_.always(repo.root, repo.getcwd())
    s = lfdirstate.status(match, [], False, False, False)
    unsure, modified, added, removed, missing, unknown, ignored, clean = s
    for lfile in unsure:
        if repo[rev][standin(lfile)].data().strip() != \
                hashfile(repo.wjoin(lfile)):
            modified.append(lfile)
        else:
            clean.append(lfile)
            lfdirstate.normal(lfile)
    return (modified, added, removed, missing, unknown, ignored, clean)
Developer: sandeepprasanna, Project: ODOO, Lines: 12, Source: lfutil.py
Example 7: applyone
def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
             filter=None):
    '''apply the patch in patchfile to the repository as a transplant'''
    (manifest, user, (time, timezone), files, message) = cl[:5]
    date = "%d %d" % (time, timezone)
    extra = {'transplant_source': node}
    if filter:
        (user, date, message) = self.filter(filter, node, cl, patchfile)

    if log:
        # we don't translate messages inserted into commits
        message += '\n(transplanted from %s)' % nodemod.hex(node)

    self.ui.status(_('applying %s\n') % nodemod.short(node))
    self.ui.note('%s %s\n%s\n' % (user, date, message))

    if not patchfile and not merge:
        raise error.Abort(_('can only omit patchfile if merging'))
    if patchfile:
        try:
            files = set()
            patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
            files = list(files)
        except Exception as inst:
            seriespath = os.path.join(self.path, 'series')
            if os.path.exists(seriespath):
                os.unlink(seriespath)
            p1 = repo.dirstate.p1()
            p2 = node
            self.log(user, date, message, p1, p2, merge=merge)
            self.ui.write(str(inst) + '\n')
            raise TransplantError(_('fix up the working directory and run '
                                    'hg transplant --continue'))
    else:
        files = None
    if merge:
        p1, p2 = repo.dirstate.parents()
        repo.setparents(p1, node)
        m = match.always(repo.root, '')
    else:
        m = match.exact(repo.root, '', files)

    n = repo.commit(message, user, date, extra=extra, match=m,
                    editor=self.getcommiteditor())
    if not n:
        self.ui.warn(_('skipping emptied changeset %s\n') %
                     nodemod.short(node))
        return None
    if not merge:
        self.transplants.set(n, node)

    return n
Developer: motlin, Project: cyg, Lines: 52, Source: transplant.py
Example 8: _walkstreamfiles
def _walkstreamfiles(orig, repo):
    if state.shallowremote:
        # if we are shallow ourselves, stream our local commits
        if shallowrepo.requirement in repo.requirements:
            striplen = len(repo.store.path) + 1
            readdir = repo.store.rawvfs.readdir
            visit = [os.path.join(repo.store.path, 'data')]
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG:
                        if not fp.endswith('.i') and not fp.endswith('.d'):
                            n = util.pconvert(fp[striplen:])
                            yield (store.decodedir(n), n, st.st_size)
                    if kind == stat.S_IFDIR:
                        visit.append(fp)

        shallowtrees = repo.ui.configbool('remotefilelog', 'shallowtrees',
                                          False)
        if 'treemanifest' in repo.requirements and not shallowtrees:
            for (u, e, s) in repo.store.datafiles():
                if (u.startswith('meta/') and
                    (u.endswith('.i') or u.endswith('.d'))):
                    yield (u, e, s)

        # Return .d and .i files that do not match the shallow pattern
        match = state.match
        if match and not match.always():
            for (u, e, s) in repo.store.datafiles():
                f = u[5:-2]  # trim data/... and .i/.d
                if not state.match(f):
                    yield (u, e, s)

        for x in repo.store.topfiles():
            if shallowtrees and x[0][:15] == '00manifesttree.':
                continue
            if state.noflatmf and x[0][:11] == '00manifest.':
                continue
            yield x
    elif shallowrepo.requirement in repo.requirements:
        # don't allow cloning from a shallow repo to a full repo
        # since it would require fetching every version of every
        # file in order to create the revlogs.
        raise error.Abort(_("Cannot clone from a shallow repo "
                            "to a full repo."))
    else:
        for x in orig(repo):
            yield x
Developer: davidshepherd7, Project: dotfiles, Lines: 50, Source: remotefilelogserver.py
Example 9: lfdirstatestatus
def lfdirstatestatus(lfdirstate, repo, rev):
    match = match_.always(repo.root, repo.getcwd())
    s = lfdirstate.status(match, [], False, False, False)
    unsure, modified, added, removed, missing, unknown, ignored, clean = s
    for lfile in unsure:
        try:
            fctx = repo[rev][standin(lfile)]
        except LookupError:
            fctx = None
        if not fctx or fctx.data().strip() != hashfile(repo.wjoin(lfile)):
            modified.append(lfile)
        else:
            clean.append(lfile)
            lfdirstate.normal(lfile)
    return (modified, added, removed, missing, unknown, ignored, clean)
Developer: 32bitfloat, Project: intellij-community, Lines: 15, Source: lfutil.py
Example 10: lfdirstatestatus
def lfdirstatestatus(lfdirstate, repo):
    wctx = repo['.']
    match = match_.always(repo.root, repo.getcwd())
    unsure, s = lfdirstate.status(match, [], False, False, False)
    modified, clean = s.modified, s.clean
    for lfile in unsure:
        try:
            fctx = wctx[standin(lfile)]
        except LookupError:
            fctx = None
        if not fctx or fctx.data().strip() != hashfile(repo.wjoin(lfile)):
            modified.append(lfile)
        else:
            clean.append(lfile)
            lfdirstate.normal(lfile)
    return s
Developer: ZanderZhang, Project: Andriod-Learning, Lines: 16, Source: lfutil.py
Example 11: _commit
def _commit(orig, self, *args, **kwargs):
    if _disabled[0]:
        return orig(self, *args, **kwargs)

    with self.wlock(), self.lock(), self.transaction('dirsynccommit'):
        matcher = args[3] if len(args) >= 4 else kwargs.get('match')
        matcher = matcher or matchmod.always(self.root, '')

        mirroredfiles = _updateworkingcopy(self, matcher)
        if mirroredfiles and not matcher.always():
            origmatch = matcher.matchfn
            def extramatches(path):
                return path in mirroredfiles or origmatch(path)
            matcher.matchfn = extramatches
            matcher._files.extend(mirroredfiles)
            matcher._fileset.update(mirroredfiles)

        return orig(self, *args, **kwargs)
Developer: davidshepherd7, Project: dotfiles, Lines: 18, Source: dirsync.py
Example 12: generatefiles
def generatefiles(orig, self, changedfiles, linknodes, commonrevs, source):
    caps = self._bundlecaps or []
    if shallowrepo.requirement in caps:
        # only send files that don't match the specified patterns
        includepattern = None
        excludepattern = None
        for cap in (self._bundlecaps or []):
            if cap.startswith("includepattern="):
                includepattern = cap[len("includepattern="):].split('\0')
            elif cap.startswith("excludepattern="):
                excludepattern = cap[len("excludepattern="):].split('\0')

        m = match.always(repo.root, '')
        if includepattern or excludepattern:
            m = match.match(repo.root, '', None,
                includepattern, excludepattern)

        changedfiles = list([f for f in changedfiles if not m(f)])

    return orig(self, changedfiles, linknodes, commonrevs, source)
Developer: pycontribs, Project: remotefilelog, Lines: 19, Source: remotefilelogserver.py
Example 13: diffs
def diffs(repo, tmpl, ctx, files, parity, style):
    def countgen():
        start = 1
        while True:
            yield start
            start += 1

    blockcount = countgen()
    def prettyprintlines(diff):
        blockno = blockcount.next()
        for lineno, l in enumerate(diff.splitlines(True)):
            lineno = "%d.%d" % (blockno, lineno + 1)
            if l.startswith("+"):
                ltype = "difflineplus"
            elif l.startswith("-"):
                ltype = "difflineminus"
            elif l.startswith("@"):
                ltype = "difflineat"
            else:
                ltype = "diffline"
            yield tmpl(ltype, line=l, lineid="l%s" % lineno, linenumber="% 8s" % lineno)

    if files:
        m = match.exact(repo.root, repo.getcwd(), files)
    else:
        m = match.always(repo.root, repo.getcwd())

    diffopts = patch.diffopts(repo.ui, untrusted=True)
    parents = ctx.parents()
    node1 = parents and parents[0].node() or nullid
    node2 = ctx.node()

    block = []
    for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
        if chunk.startswith("diff") and block:
            yield tmpl("diffblock", parity=parity.next(), lines=prettyprintlines("".join(block)))
            block = []
        if chunk.startswith("diff") and style != "raw":
            chunk = "".join(chunk.splitlines(True)[1:])
        block.append(chunk)
    yield tmpl("diffblock", parity=parity.next(), lines=prettyprintlines("".join(block)))
Developer: helloandre, Project: cr48, Lines: 42, Source: webutil.py
Example 14: stream_out_shallow
def stream_out_shallow(repo, proto, other):
    includepattern = None
    excludepattern = None
    raw = other.get('includepattern')
    if raw:
        includepattern = raw.split('\0')
    raw = other.get('excludepattern')
    if raw:
        excludepattern = raw.split('\0')

    oldshallow = state.shallowremote
    oldmatch = state.match
    try:
        state.shallowremote = True
        state.match = match.always(repo.root, '')
        if includepattern or excludepattern:
            state.match = match.match(repo.root, '', None,
                includepattern, excludepattern)

        return wireproto.stream(repo, proto)
    finally:
        state.shallowremote = oldshallow
        state.match = oldmatch
Developer: pycontribs, Project: remotefilelog, Lines: 22, Source: remotefilelogserver.py
Example 15: stream_out_shallow
def stream_out_shallow(repo, proto, other):
    includepattern = None
    excludepattern = None
    raw = other.get('includepattern')
    if raw:
        includepattern = raw.split('\0')
    raw = other.get('excludepattern')
    if raw:
        excludepattern = raw.split('\0')

    oldshallow = state.shallowremote
    oldmatch = state.match
    oldnoflatmf = state.noflatmf
    try:
        state.shallowremote = True
        state.match = match.always(repo.root, '')
        state.noflatmf = other.get('noflatmanifest') == 'True'
        if includepattern or excludepattern:
            state.match = match.match(repo.root, '', None,
                includepattern, excludepattern)

        streamres = wireprotov1server.stream(repo, proto)

        # Force the first value to execute, so the file list is computed
        # within the try/finally scope
        first = next(streamres.gen)
        second = next(streamres.gen)
        def gen():
            yield first
            yield second
            for value in streamres.gen:
                yield value
        return wireprototypes.streamres(gen())
    finally:
        state.shallowremote = oldshallow
        state.match = oldmatch
        state.noflatmf = oldnoflatmf
Developer: davidshepherd7, Project: dotfiles, Lines: 36, Source: remotefilelogserver.py
Example 16: _walkstreamfiles
def _walkstreamfiles(orig, repo):
    if state.shallowremote:
        # if we are shallow ourselves, stream our local commits
        if shallowrepo.requirement in repo.requirements:
            striplen = len(repo.store.path) + 1
            readdir = repo.store.rawvfs.readdir
            visit = [os.path.join(repo.store.path, 'data')]
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG:
                        if not fp.endswith('.i') and not fp.endswith('.d'):
                            n = util.pconvert(fp[striplen:])
                            yield (store.decodedir(n), n, st.st_size)
                    if kind == stat.S_IFDIR:
                        visit.append(fp)

        # Return .d and .i files that do not match the shallow pattern
        match = state.match or match.always(repo.root, '')
        for (u, e, s) in repo.store.datafiles():
            f = u[5:-2]  # trim data/... and .i/.d
            if not state.match(f):
                yield (u, e, s)

        for x in repo.store.topfiles():
            yield x
    elif shallowrepo.requirement in repo.requirements:
        # don't allow cloning from a shallow repo to a full repo
        # since it would require fetching every version of every
        # file in order to create the revlogs.
        raise util.Abort(_("Cannot clone from a shallow repo "
                           + "to a full repo."))
    else:
        for x in orig(repo):
            yield x
Developer: pycontribs, Project: remotefilelog, Lines: 36, Source: remotefilelogserver.py
Example 17: status
def status(self, node1='.', node2=None, match=None, ignored=False,
           clean=False, unknown=False, listsubrepos=False):
    listignored, listclean, listunknown = ignored, clean, unknown
    orig = super(lfilesrepo, self).status
    if not self.lfstatus:
        return orig(node1, node2, match, listignored, listclean,
                    listunknown, listsubrepos)

    # some calls in this function rely on the old version of status
    self.lfstatus = False
    ctx1 = self[node1]
    ctx2 = self[node2]
    working = ctx2.rev() is None
    parentworking = working and ctx1 == self['.']

    if match is None:
        match = match_.always(self.root, self.getcwd())

    wlock = None
    try:
        try:
            # updating the dirstate is optional
            # so we don't wait on the lock
            wlock = self.wlock(False)
        except error.LockError:
            pass

        # First check if paths or patterns were specified on the
        # command line. If there were, and they don't match any
        # largefiles, we should just bail here and let super
        # handle it -- thus gaining a big performance boost.
        lfdirstate = lfutil.openlfdirstate(ui, self)
        if not match.always():
            for f in lfdirstate:
                if match(f):
                    break
            else:
                return orig(node1, node2, match, listignored, listclean,
                            listunknown, listsubrepos)

        # Create a copy of match that matches standins instead
        # of largefiles.
        def tostandins(files):
            if not working:
                return files
            newfiles = []
            dirstate = self.dirstate
            for f in files:
                sf = lfutil.standin(f)
                if sf in dirstate:
                    newfiles.append(sf)
                elif sf in dirstate.dirs():
                    # Directory entries could be regular or
                    # standin, check both
                    newfiles.extend((f, sf))
                else:
                    newfiles.append(f)
            return newfiles

        m = copy.copy(match)
        m._files = tostandins(m._files)

        result = orig(node1, node2, m, ignored, clean, unknown,
                      listsubrepos)
        if working:

            def sfindirstate(f):
                sf = lfutil.standin(f)
                dirstate = self.dirstate
                return sf in dirstate or sf in dirstate.dirs()

            match._files = [f for f in match._files
                            if sfindirstate(f)]
            # Don't waste time getting the ignored and unknown
            # files from lfdirstate
            unsure, s = lfdirstate.status(match, [], False, listclean,
                                          False)
            (modified, added, removed, clean) = (s.modified, s.added,
                                                 s.removed, s.clean)
            if parentworking:
                for lfile in unsure:
                    standin = lfutil.standin(lfile)
                    if standin not in ctx1:
                        # from second parent
                        modified.append(lfile)
                    elif ctx1[standin].data().strip() \
                            != lfutil.hashfile(self.wjoin(lfile)):
                        modified.append(lfile)
                    else:
                        if listclean:
                            clean.append(lfile)
                        lfdirstate.normal(lfile)
            else:
                tocheck = unsure + modified + added + clean
                modified, added, clean = [], [], []
                checkexec = self.dirstate._checkexec

                for lfile in tocheck:
                    standin = lfutil.standin(lfile)
                    if standin in ctx1:
#......... remainder of code omitted .........
Developer: raymundviloria, Project: android-app, Lines: 101, Source: reposetup.py
Example 18: updatestandinsbymatch
def updatestandinsbymatch(repo, match):
    '''Update standins in the working directory according to specified match

    This returns (possibly modified) ``match`` object to be used for
    subsequent commit process.
    '''

    ui = repo.ui

    # Case 1: user calls commit with no specific files or
    # include/exclude patterns: refresh and commit all files that
    # are "dirty".
    if match is None or match.always():
        # Spend a bit of time here to get a list of files we know
        # are modified so we can compare only against those.
        # It can cost a lot of time (several seconds)
        # otherwise to update all standins if the largefiles are
        # large.
        lfdirstate = openlfdirstate(ui, repo)
        dirtymatch = match_.always(repo.root, repo.getcwd())
        unsure, s = lfdirstate.status(dirtymatch, [], False, False,
                                      False)
        modifiedfiles = unsure + s.modified + s.added + s.removed
        lfiles = listlfiles(repo)
        # this only loops through largefiles that exist (not
        # removed/renamed)
        for lfile in lfiles:
            if lfile in modifiedfiles:
                if os.path.exists(
                        repo.wjoin(standin(lfile))):
                    # this handles the case where a rebase is being
                    # performed and the working copy is not updated
                    # yet.
                    if os.path.exists(repo.wjoin(lfile)):
                        updatestandin(repo,
                                      standin(lfile))

        return match

    lfiles = listlfiles(repo)
    match._files = repo._subdirlfs(match.files(), lfiles)

    # Case 2: user calls commit with specified patterns: refresh
    # any matching big files.
    smatcher = composestandinmatcher(repo, match)
    standins = repo.dirstate.walk(smatcher, [], False, False)

    # No matching big files: get out of the way and pass control to
    # the usual commit() method.
    if not standins:
        return match

    # Refresh all matching big files. It's possible that the
    # commit will end up failing, in which case the big files will
    # stay refreshed. No harm done: the user modified them and
    # asked to commit them, so sooner or later we're going to
    # refresh the standins. Might as well leave them refreshed.
    lfdirstate = openlfdirstate(ui, repo)
    for fstandin in standins:
        lfile = splitstandin(fstandin)
        if lfdirstate[lfile] != 'r':
            updatestandin(repo, fstandin)

    # Cook up a new matcher that only matches regular files or
    # standins corresponding to the big files requested by the
    # user. Have to modify _files to prevent commit() from
    # complaining "not tracked" for big files.
    match = copy.copy(match)
    origmatchfn = match.matchfn

    # Check both the list of largefiles and the list of
    # standins because if a largefile was removed, it
    # won't be in the list of largefiles at this point
    match._files += sorted(standins)

    actualfiles = []
    for f in match._files:
        fstandin = standin(f)

        # ignore known largefiles and standins
        if f in lfiles or fstandin in standins:
            continue

        actualfiles.append(f)
    match._files = actualfiles

    def matchfn(f):
        if origmatchfn(f):
            return f not in lfiles
        else:
            return f in standins

    match.matchfn = matchfn
    return match
Developer: areshero, Project: ThirdWorldApp, Lines: 95, Source: lfutil.py
Example 19: commit
def commit(self, text="", user=None, date=None, match=None,
           force=False, editor=False, extra={}):
    orig = super(lfiles_repo, self).commit

    wlock = repo.wlock()
    try:
        if getattr(repo, "_isrebasing", False):
            # We have to take the time to pull down the new
            # largefiles now. Otherwise if we are rebasing,
            # any largefiles that were modified in the
            # destination changesets get overwritten, either
            # by the rebase or in the first commit after the
            # rebase.
            lfcommands.updatelfiles(repo.ui, repo)
        # Case 1: user calls commit with no specific files or
        # include/exclude patterns: refresh and commit all files that
        # are "dirty".
        if ((match is None) or
            (not match.anypats() and not match.files())):
            # Spend a bit of time here to get a list of files we know
            # are modified so we can compare only against those.
            # It can cost a lot of time (several seconds)
            # otherwise to update all standins if the largefiles are
            # large.
            lfdirstate = lfutil.openlfdirstate(ui, self)
            dirtymatch = match_.always(repo.root, repo.getcwd())
            s = lfdirstate.status(dirtymatch, [], False, False, False)
            modifiedfiles = []
            for i in s:
                modifiedfiles.extend(i)
            lfiles = lfutil.listlfiles(self)
            # this only loops through largefiles that exist (not
            # removed/renamed)
            for lfile in lfiles:
                if lfile in modifiedfiles:
                    if os.path.exists(self.wjoin(lfutil.standin(lfile))):
                        # this handles the case where a rebase is being
                        # performed and the working copy is not updated
                        # yet.
                        if os.path.exists(self.wjoin(lfile)):
                            lfutil.updatestandin(self,
                                lfutil.standin(lfile))
                            lfdirstate.normal(lfile)
            for lfile in lfdirstate:
                if lfile in modifiedfiles:
                    if not os.path.exists(
                            repo.wjoin(lfutil.standin(lfile))):
                        lfdirstate.drop(lfile)
            lfdirstate.write()

            return orig(text=text, user=user, date=date, match=match,
                        force=force, editor=editor, extra=extra)

        for f in match.files():
            if lfutil.isstandin(f):
                raise util.Abort(
                    _('file "%s" is a largefile standin') % f,
                    hint=('commit the largefile itself instead'))

        # Case 2: user calls commit with specified patterns: refresh
        # any matching big files.
        smatcher = lfutil.composestandinmatcher(self, match)
        standins = lfutil.dirstate_walk(self.dirstate, smatcher)

        # No matching big files: get out of the way and pass control to
        # the usual commit() method.
        if not standins:
            return orig(text=text, user=user, date=date, match=match,
                        force=force, editor=editor, extra=extra)

        # Refresh all matching big files. It's possible that the
        # commit will end up failing, in which case the big files will
        # stay refreshed. No harm done: the user modified them and
        # asked to commit them, so sooner or later we're going to
        # refresh the standins. Might as well leave them refreshed.
        lfdirstate = lfutil.openlfdirstate(ui, self)
        for standin in standins:
            lfile = lfutil.splitstandin(standin)
            if lfdirstate[lfile] != 'r':
                lfutil.updatestandin(self, standin)
                lfdirstate.normal(lfile)
            else:
                lfdirstate.drop(lfile)
        lfdirstate.write()

        # Cook up a new matcher that only matches regular files or
        # standins corresponding to the big files requested by the
        # user. Have to modify _files to prevent commit() from
        # complaining "not tracked" for big files.
        lfiles = lfutil.listlfiles(repo)
        match = copy.copy(match)
        orig_matchfn = match.matchfn

        # Check both the list of largefiles and the list of
        # standins because if a largefile was removed, it
        # won't be in the list of largefiles at this point
        match._files += sorted(standins)

        actualfiles = []
        for f in match._files:
#......... remainder of code omitted .........
Developer: mortonfox, Project: cr48, Lines: 101, Source: reposetup.py
Example 20: commit
def commit(self, text="", user=None, date=None, match=None,
           force=False, editor=False, extra={}):
    orig = super(lfilesrepo, self).commit

    wlock = self.wlock()
    try:
        # Case 0: Rebase or Transplant
        # We have to take the time to pull down the new largefiles now.
        # Otherwise, any largefiles that were modified in the
        # destination changesets get overwritten, either by the rebase
        # or in the first commit after the rebase or transplant.
        # updatelfiles will update the dirstate to mark any pulled
        # largefiles as modified
        if getattr(self, "_isrebasing", False) or \
                getattr(self, "_istransplanting", False):
            lfcommands.updatelfiles(self.ui, self, filelist=None,
                                    printmessage=False)
            result = orig(text=text, user=user, date=date, match=match,
                          force=force, editor=editor, extra=extra)
            return result
        # Case 1: user calls commit with no specific files or
        # include/exclude patterns: refresh and commit all files that
        # are "dirty".
        if ((match is None) or
            (not match.anypats() and not match.files())):
            # Spend a bit of time here to get a list of files we know
            # are modified so we can compare only against those.
            # It can cost a lot of time (several seconds)
            # otherwise to update all standins if the largefiles are
            # large.
            lfdirstate = lfutil.openlfdirstate(ui, self)
            dirtymatch = match_.always(self.root, self.getcwd())
            s = lfdirstate.status(dirtymatch, [], False, False, False)
            (unsure, modified, added, removed, _missing, _unknown,
             _ignored, _clean) = s
            modifiedfiles = unsure + modified + added + removed
            lfiles = lfutil.listlfiles(self)
            # this only loops through largefiles that exist (not
            # removed/renamed)
            for lfile in lfiles:
                if lfile in modifiedfiles:
#......... remainder of code omitted .........