mirror of
https://github.com/mnauw/git-remote-hg.git
synced 2026-01-17 03:32:05 +01:00
1948 lines
62 KiB
Python
Executable File
1948 lines
62 KiB
Python
Executable File
#!/usr/bin/env python
|
|
#
|
|
# Copyright (c) 2012 Felipe Contreras
|
|
# Copyright (c) 2016 Mark Nauwelaerts
|
|
#
|
|
|
|
# Inspired by Rocco Rutte's hg-fast-export
|
|
|
|
# Just copy to your ~/bin, or anywhere in your $PATH.
|
|
# Then you can clone with:
|
|
# git clone hg::/path/to/mercurial/repo/
|
|
#
|
|
# For remote repositories a local clone is stored in
|
|
# "$GIT_DIR/hg/origin/clone/.hg/".
|
|
|
|
from mercurial import hg, ui, bookmarks, context, encoding
|
|
from mercurial import node, error, extensions, discovery, util, scmutil
|
|
from mercurial import changegroup
|
|
|
|
import re
|
|
import sys
|
|
import os
|
|
import json
|
|
import shutil
|
|
import subprocess
|
|
import atexit
|
|
import hashlib
|
|
import io
|
|
import time as ptime
|
|
|
|
# python 2/3 compatibility approach:
|
|
# * all data exchanged with git or Mercurial is bytes (as was in python2)
|
|
# (and is also how Mercurial has internally migrated)
|
|
# * since such data typically includes paths, all paths are also always bytes
|
|
# * where it does no harm and/or involves plain ASCII anyway,
|
|
# either bytes or str is used as convenient (e.g. internal enum-like use, etc)
|
|
|
|
# generic
class basecompat:
    """Helpers shared by both the Python 2 and Python 3 compat classes."""
    @staticmethod
    def char(c):
        # single element of a length-1 (byte)string; an int on Python 3,
        # a 1-char str on Python 2
        assert len(c) == 1
        return c[0]
|
|
|
|
if sys.version_info[0] == 3:
    import locale
    import urllib.parse

    class compat(basecompat):
        """Python 3 side of the 2/3 compatibility layer: byte-oriented
        stdio, environment and URL helpers."""

        # sigh ... wonderful python3 ... as taken from Mercurial's pycompat
        @staticmethod
        def decode_sysarg(arg):
            # re-encode a (str) command-line argument into the bytes the
            # rest of the helper works with
            if os.name == r'nt':
                return arg.encode("mbcs", "ignore")
            else:
                enc = (
                    locale.getlocale()[1]
                    or locale.getdefaultlocale()[1]
                    or sys.getfilesystemencoding()
                )
                return arg.encode(enc, "surrogateescape")

        @staticmethod
        def iteritems(d):
            return d.items()

        # mostly used for straight 'cast' (not real unicode content)
        @staticmethod
        def to_b(s, *args):
            # str -> bytes (latin-1 by default); bytes pass through
            if isinstance(s, str):
                args = args or ['latin-1']
                return s.encode(*args)
            return s

        @staticmethod
        def to_str(s):
            # bytes -> str via latin-1 'cast'; str passes through
            if isinstance(s, bytes):
                return s.decode('latin-1')
            return s

        @staticmethod
        def urlquote(*args, **kwargs):
            # percent-quote bytes, returning bytes
            return compat.to_b(urllib.parse.quote_from_bytes(*args, **kwargs))

        @staticmethod
        def urlunquote(*args, **kwargs):
            return urllib.parse.unquote_to_bytes(*args, **kwargs)

        @staticmethod
        def unescape(s):
            # undo C-style backslash escapes in a bytes string
            return bytes(s.decode('unicode-escape'), 'latin-1')

        # byte-oriented standard streams
        stdin = sys.stdin.buffer
        stdout = sys.stdout.buffer
        stderr = sys.stderr.buffer
        getcwd = os.getcwdb

        @staticmethod
        def getenvb(val, default):
            # bytes-in/bytes-out getenv fallback for platforms without
            # os.environb support (e.g. Windows)
            result = os.getenv(val.decode(), default.decode() if hasattr(default, 'decode') else default)
            # if result is a string, get bytes instead
            result = result.encode() if hasattr(result, 'encode') else result
            return result

        getenv = os.getenvb if os.supports_bytes_environ else getenvb
        urlparse = urllib.parse.urlparse
        urljoin = urllib.parse.urljoin
else:
    import urllib
    from urlparse import urlparse as _urlparse
    from urlparse import urljoin as _urljoin

    class compat(basecompat):
        """Python 2 side of the compatibility layer (str is bytes, so
        most helpers are identity functions)."""

        # life was simple in those days ...
        @staticmethod
        def iteritems(d):
            return d.iteritems()

        @staticmethod
        def unescape(s):
            return s.decode('string-escape')

        @staticmethod
        def to_b(s, *args):
            return s

        to_str = to_b
        decode_sysarg = to_b
        stdin = sys.stdin
        stdout = sys.stdout
        stderr = sys.stderr
        getcwd = staticmethod(os.getcwd)
        getenv = staticmethod(os.getenv)
        urlquote = staticmethod(urllib.quote)
        urlunquote = staticmethod(urllib.unquote)
        urlparse = staticmethod(_urlparse)
        urljoin = staticmethod(_urljoin)
|
|
|
|
#
|
|
# If you want to see Mercurial revisions as Git commit notes:
|
|
# git config core.notesRef refs/notes/hg
|
|
#
|
|
# If you are not in hg-git-compat mode and want to disable the tracking of
|
|
# named branches:
|
|
# git config --global remote-hg.track-branches false
|
|
#
|
|
# If you want the equivalent of hg's clone/pull --insecure option:
|
|
# git config --global remote-hg.insecure true
|
|
#
|
|
# If you want to switch to hg-git compatibility mode:
|
|
# git config --global remote-hg.hg-git-compat true
|
|
#
|
|
# git:
|
|
# Sensible defaults for git.
|
|
# hg bookmarks are exported as git branches, hg branches are prefixed
|
|
# with 'branches/', HEAD is a special case.
|
|
#
|
|
# hg:
|
|
# Emulate hg-git.
|
|
# Only hg bookmarks are exported as git branches.
|
|
# Commits are modified to preserve hg information and allow bidirectionality.
|
|
#
|
|
|
|
# Regular expressions used to parse/normalize author identities exchanged
# between git ("Name <mail>") and Mercurial (free-form user strings).
NAME_RE = re.compile(br'^([^<>]+)')
AUTHOR_RE = re.compile(br'^([^<>]+?)? ?[<>]([^<>]*)(?:$|>)')
EMAIL_RE = re.compile(br'([^ \t<>]+@[^ \t<>]+)')
AUTHOR_HG_RE = re.compile(br'^(.*?) ?<(.*?)(?:>(.*))?$')
RAW_AUTHOR_RE = re.compile(br'^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)')

# version of the marks file format written/expected by this helper
VERSION = 2
|
|
|
|
def die(msg):
    # fatal: report on stderr and abort the helper with status 1
    compat.stderr.write(b'ERROR: %s\n' % compat.to_b(msg, 'utf-8'))
    sys.exit(1)
|
|
|
|
def debug(*args):
    # developer aid; stdout is reserved for the remote-helper protocol
    compat.stderr.write(b'DEBUG: %s\n' % compat.to_b(repr(args)))
|
|
|
|
def warn(msg):
    # non-fatal diagnostic on stderr, flushed so it interleaves sensibly
    compat.stderr.write(b'WARNING: %s\n' % compat.to_b(msg, 'utf-8'))
    compat.stderr.flush()
|
|
|
|
def puts(msg = b''):
    # protocol output: one bytes line on stdout
    compat.stdout.write(msg)
    compat.stdout.write(b'\n')
|
|
|
|
def gitmode(flags):
    """Map Mercurial file flags to the git tree-entry mode string.

    b'l' (symlink) wins over b'x' (executable); anything else is a
    regular file.
    """
    if b'l' in flags:
        return b'120000'
    if b'x' in flags:
        return b'100755'
    return b'100644'
|
|
|
|
def gittz(tz):
    """Convert Mercurial's timezone (seconds west of UTC) to the git
    offset string, e.g. -19800 -> b'+0530'.

    Fix: use integer division; ``/`` yields a float under Python 3 and
    the second field is the minutes remainder in seconds.
    """
    sign = 1 if tz >= 0 else -1
    # hours plus a seconds remainder (0..3599)
    hours, seconds = divmod(abs(tz), 60 * 60)
    # hg counts west positive, git east positive -> negate the sign
    return b'%+03d%02d' % (-sign * hours, seconds // 60)
|
|
|
|
def hgtz(tz):
    """Convert a git offset (e.g. b'-0500' or the int -500) to
    Mercurial's seconds-west-of-UTC integer."""
    offset = int(tz)
    west = offset < 0
    # the offset is HHMM packed into one integer
    hours, minutes = divmod(abs(offset), 100)
    seconds = hours * 60 * 60 + minutes * 60
    # git counts east positive, hg west positive
    return seconds if west else -seconds
|
|
|
|
def hgmode(mode):
    """Map a git tree-entry mode to Mercurial file flags (b'' means a
    regular non-executable file)."""
    if mode == b'100755':
        return b'x'
    if mode == b'120000':
        return b'l'
    return b''
|
|
|
|
def hghex(n):
    # binary node -> 40-char hex bytes
    return node.hex(n)
|
|
|
|
def hgbin(n):
    # 40-char hex bytes -> binary node
    return node.bin(n)
|
|
|
|
def hgref(ref):
    # inverse of gitref(): restore spaces and literal '___' first, then
    # undo the percent encoding
    return compat.urlunquote(ref.replace(b'___', b'%20').replace(b'%5F%5F%5F', b'___'))
|
|
|
|
def gitref(ref):
    """Encode an hg name (bytes) into a git-safe ref component."""
    # standard url percentage encoding with a (legacy) twist:
    # ' ' -> '___'
    # '___' also percentage encoded
    # python 3.6 considers ~ reserved, whereas python 3.7 no longer
    return compat.urlquote(ref).replace(b'___', b'%5F%5F%5F'). \
            replace(b'%20', b'___').replace(b'~', b'%7E')
|
|
|
|
def check_version(*check):
    """True if the Mercurial version tuple (global hg_version) is at
    least *check*; optimistically True when the version is unknown."""
    if not hg_version:
        return True
    return hg_version >= check
|
|
|
|
def get_config(config, getall=False):
    """Return the raw bytes output of 'git config --get[-all] <config>'
    (empty bytes when the key is unset)."""
    flag = '--get-all' if getall else '--get'
    process = subprocess.Popen(['git', 'config', flag, config],
                               stdout=subprocess.PIPE)
    output, _ = process.communicate()
    return output
|
|
|
|
def get_config_bool(config, default=False):
    """Interpret the git config key *config* as a boolean, returning
    *default* when unset or unrecognized."""
    value = get_config(config).rstrip()
    if value == b"true":
        return True
    if value == b"false":
        return False
    return default
|
|
|
|
def rev_parse(rev):
    """Resolve *rev* via 'git rev-parse --verify -q'; empty bytes when
    it does not resolve."""
    process = subprocess.Popen(['git', 'rev-parse', '--verify', '-q', rev],
                               stdout=subprocess.PIPE)
    output, _ = process.communicate()
    return output
|
|
|
|
def get_rev_hg(commit):
    """Return the hg revision stored as a refs/notes/hg git note for
    *commit* (empty bytes when there is none)."""
    process = subprocess.Popen(
        ['git', 'notes', '--ref', 'refs/notes/hg', 'show', commit],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    hgrev, _ = process.communicate()
    return hgrev
|
|
|
|
class Marks:
    """Persistent mapping between Mercurial changeset hashes and
    fast-import/export mark numbers, stored as JSON at *path*."""

    def __init__(self, path, _repo=None):
        # _repo is accepted for interface compatibility but unused here
        self.path = path
        self.clear()
        self.load()

    def clear(self):
        # reset to a pristine, empty state
        self.tips = {}
        self.marks = {}       # hg hex revision (bytes) -> mark (int)
        self.rev_marks = {}   # mark (int) -> hg hex revision (bytes)
        self.last_mark = 0
        self.version = VERSION
        self.last_note = 0

    def load(self):
        """Load previously stored marks from self.path, if present."""
        if not os.path.exists(self.path):
            return

        tmp = json.load(open(self.path))
        # convert to binary entries
        marks = {}
        for r, m in compat.iteritems(tmp['marks']):
            marks[compat.to_b(r)] = m

        # NOTE(review): tips becomes a list here while clear() uses a
        # dict; stored tips (and last-note) are discarded on load
        self.tips = []
        self.marks = marks
        self.last_mark = tmp['last-mark']
        self.version = tmp['version']
        self.last_note = 0

        # rebuild the reverse mapping
        for rev, mark in compat.iteritems(self.marks):
            self.rev_marks[mark] = rev

    def dict(self):
        # JSON-serializable snapshot of the state
        return { 'tips': self.tips, 'marks': self.marks,
                'last-mark': self.last_mark, 'version': self.version,
                'last-note': self.last_note }

    def store(self):
        """Write the current state to self.path as JSON."""
        # convert to str prior to dump
        d = self.dict()
        marks = {}
        for r, m in compat.iteritems(d['marks']):
            marks[compat.to_str(r)] = m
        d['marks'] = marks
        json.dump(d, open(self.path, 'w'))

    def __str__(self):
        return str(self.dict())

    def from_rev(self, rev):
        # mark assigned to an hg hex revision (KeyError if unknown)
        return self.marks[rev]

    def to_rev(self, mark):
        # hg hex revision recorded for a mark (KeyError if unknown)
        return self.rev_marks[mark]

    def next_mark(self):
        # allocate a fresh mark without binding it to a revision
        self.last_mark += 1
        return self.last_mark

    def get_mark(self, rev):
        # allocate a fresh mark and bind it to *rev*
        # NOTE(review): does not update rev_marks, unlike new_mark
        self.last_mark += 1
        self.marks[rev] = self.last_mark
        return self.last_mark

    def new_mark(self, rev, mark):
        # record an externally assigned mark for *rev* in both directions
        self.marks[rev] = mark
        self.rev_marks[mark] = rev
        self.last_mark = mark

    def is_marked(self, rev):
        return rev in self.marks
|
|
|
|
|
|
class ParserContext:
    """State shared between cooperating Parser instances."""

    def __init__(self):
        # known context attributes
        self.localref = None    # local ref being processed, if any
        self.remoteref = None   # remote ref override used by parse_commit
        self.gitmarks = None    # git-side marks mapping, if available
        self.hghelper = None    # helper used when pushing hg-based commits
        self.revs = []          # revisions gathered while parsing (populated elsewhere)
|
|
|
|
class Parser:
    """Line-oriented reader for the git remote-helper/fast-export
    command stream."""

    def __init__(self, repo, cmdstream=compat.stdin, ctx=None):
        """Start parsing *cmdstream*; the first line is read eagerly.

        Fix: *ctx* now defaults to a fresh ParserContext per instance;
        the previous ``ctx=ParserContext()`` default was a mutable
        default argument shared by every Parser created without an
        explicit context.
        """
        self.repo = repo
        self.cmdstream = cmdstream
        self.line = self.get_line()
        self.context = ctx if ctx is not None else ParserContext()

    def get_line(self):
        # next stripped line of the command stream
        return self.cmdstream.readline().strip()

    def __getitem__(self, i):
        # i-th whitespace-separated token of the current line
        return self.line.split()[i]

    def check(self, word):
        # does the current line start with *word*?
        return self.line.startswith(word)

    def each_block(self, separator):
        # yield lines until *separator* is met (separator not yielded)
        while self.line != separator:
            yield self.line
            self.line = self.get_line()

    def __iter__(self):
        # iterate until the next empty line
        return self.each_block(b'')

    def __next__(self):
        return self.next()

    def next(self):
        # advance to the next line; b'done' maps to None (end of stream)
        self.line = self.get_line()
        if self.line == b'done':
            self.line = None

    def get_mark(self):
        # parse a ':<num>' mark reference on the current line
        i = self.line.index(b':') + 1
        return int(self.line[i:])

    def get_data(self):
        # parse a 'data <size>' header and return the raw payload,
        # or None if the current line is not a data command
        if not self.check(b'data'):
            return None
        i = self.line.index(b' ') + 1
        size = int(self.line[i:])
        return self.cmdstream.read(size)

    def get_author(self):
        """Parse a fast-export author/committer line.

        Returns (user, unix-time, hg-tz-seconds) or None when the line
        does not match RAW_AUTHOR_RE.
        """
        ex = None
        m = RAW_AUTHOR_RE.match(self.line)
        if not m:
            return None
        _, name, email, date, tz = m.groups()
        if name and b'ext:' in name:
            # undo the 'ext:(...)' round-trip encoding added on export
            m = re.match(br'^(.+?) ext:\((.+)\)$', name)
            if m:
                name = m.group(1)
                ex = compat.urlunquote(m.group(2))

        if email != bad_mail:
            if name:
                user = b'%s <%s>' % (name, email)
            else:
                user = b'<%s>' % (email)
        else:
            # placeholder mail means the original hg user had no address
            user = name

        if ex:
            user += ex

        return (user, int(date), hgtz(tz))
|
|
|
|
def fix_file_path(path):
    """Return *path* (bytes) normalized, repo-relative and with forward
    slashes as separators."""
    normed = os.path.normpath(path)
    if os.path.isabs(normed):
        normed = os.path.relpath(normed, b'/')
    if os.sep != '/':
        # even Git for Windows expects forward slashes
        return normed.replace(compat.to_b(os.sep), b'/')
    return normed
|
|
|
|
def export_file(ctx, fname):
    """Emit a fast-import blob for *fname* in changectx *ctx* (at most
    once per file node, cached in the global ``filenodes``) and return
    its (gitmode, mark, path) tuple."""
    f = ctx.filectx(fname)
    fid = node.hex(f.filenode())

    if fid in filenodes:
        # blob already emitted for this file node; reuse its mark
        mark = filenodes[fid]
    else:
        mark = marks.next_mark()
        filenodes[fid] = mark
        d = f.data()

        puts(b"blob")
        puts(b"mark :%u" % mark)
        puts(b"data %d" % len(d))
        # fix: reuse the data already read instead of a second f.data()
        puts(d)

    path = fix_file_path(f.path())
    return (gitmode(f.flags()), mark, path)
|
|
|
|
def get_filechanges(repo, ctx, parent):
    """Return (modified_or_added, removed) file names between changectx
    *ctx* and *parent*."""
    # modern Mercurial: use the status API directly
    if hasattr(parent, 'status'):
        stat = parent.status(ctx)
        return stat.modified + stat.added, stat.removed

    modified = set()
    added = set()
    removed = set()

    # load earliest manifest first for caching reasons
    prev = parent.manifest().copy()
    cur = ctx.manifest()

    for fn in cur:
        if fn in prev:
            # changed flags or a different file node -> modified
            if (cur.flags(fn) != prev.flags(fn) or cur[fn] != prev[fn]):
                modified.add(fn)
            del prev[fn]
        else:
            added.add(fn)
    # anything left over in the parent manifest no longer exists
    removed |= set(prev.keys())

    return added | modified, removed
|
|
|
|
def fixup_user_git(user):
    """Split an hg user string into (name, mail) using git-style
    matching; either element may be None when not extractable."""
    name = mail = None
    global remove_username_quotes
    if remove_username_quotes:
        user = user.replace(b'"', b'')
    m = AUTHOR_RE.match(user)
    if m:
        name = m.group(1)
        mail = m.group(2).strip()
    else:
        # no '<mail>' part; try a bare e-mail address first
        m = EMAIL_RE.match(user)
        if m:
            mail = m.group(1)
        else:
            # otherwise everything before any angle bracket is the name
            m = NAME_RE.match(user)
            if m:
                name = m.group(1).strip()
    return (name, mail)
|
|
|
|
def fixup_user_hg(user):
    """Split *user* into (name, mail) hg-git style, sanitizing both."""
    def sanitize(name):
        # stole this from hg-git
        return re.sub(br'[<>\n]', b'?', name.lstrip(b'< ').rstrip(b'> '))

    m = AUTHOR_HG_RE.match(user)
    if m:
        name = sanitize(m.group(1))
        mail = sanitize(m.group(2))
        ex = m.group(3)
        if ex:
            # keep any trailing junk, url-quoted, so it can round-trip
            name += b' ext:(' + compat.urlquote(ex) + b')'
    else:
        name = sanitize(user)
        if b'@' in user:
            mail = name
        else:
            mail = None

    return (name, mail)
|
|
|
|
def fixup_user(user):
    """Normalize an hg author string into b'Name <mail>' according to
    the active compatibility mode, substituting placeholders when a
    part could not be extracted."""
    fixer = fixup_user_git if mode == 'git' else fixup_user_hg
    name, mail = fixer(user)
    return b'%s <%s>' % (name or bad_name, mail or bad_mail)
|
|
|
|
def updatebookmarks(repo, peer):
    """Synchronize local hg bookmarks (and their private git refs) with
    the bookmarks advertised by *peer*."""
    remotemarks = peer.listkeys(b'bookmarks')

    # delete bookmarks locally that disappeared on remote
    localmarks = bookmarks.listbookmarks(repo)
    remote = set(remotemarks.keys())
    local = set(localmarks.keys())
    for bmark in local - remote:
        # pushing an empty new value removes the bookmark
        bookmarks.pushbookmark(repo, bmark, localmarks[bmark], b'')
        # also delete private ref
        pbookmark = b'%s/bookmarks/%s' % (prefix, bmark)
        subprocess.call(['git', 'update-ref', '-d', pbookmark])

    # now add or update remote bookmarks to local, if any
    localmarks = repo._bookmarks
    if not remotemarks:
        return

    # use a higher level API from now on than the lower one below
    if check_version(4,6):
        for k, v in compat.iteritems(remotemarks):
            old = hghex(localmarks.get(k, b''))
            bookmarks.pushbookmark(repo, k, old, v)
        return

    changes = { k: hgbin(v) for k, v in compat.iteritems(remotemarks) }

    # older Mercurial: apply changes under wlock inside a transaction,
    # using whichever bookmark API this version provides
    wlock = tr = None
    try:
        wlock = repo.wlock()
        tr = repo.transaction(b'bookmark')
        if check_version(4, 3):
            localmarks.applychanges(repo, tr, list(changes.items()))
        else:
            localmarks.update(changes)
            if check_version(3, 2):
                localmarks.recordchange(tr)
            elif check_version(2, 5):
                localmarks.write()
            else:
                bookmarks.write(repo)
        tr.close()
    finally:
        if tr is not None:
            tr.release()
        if wlock is not None:
            wlock.release()
|
|
|
|
def get_repo(url, alias):
    """Open the hg repository at *url*; for remote URLs a local proxy
    clone under $GIT_DIR/hg is created/updated first.

    Returns (repo, branchmap) and sets the global *peer* for remotes.
    """
    global peer

    # ui setup compatible with both old and new Mercurial
    if hasattr(ui.ui, 'load'):
        myui = ui.ui.load()
    else:
        myui = ui.ui()
    myui.setconfig(b'ui', b'interactive', b'off')
    # keep hg output away from the remote-helper protocol on stdout
    myui.fout = compat.stderr

    if get_config_bool('remote-hg.insecure'):
        myui.setconfig(b'web', b'cacerts', b'')

    extensions.loadall(myui)

    if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
        # local repository: use it directly
        repo = hg.repository(myui, url)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        branchmap = repo.branchmap()
    else:
        shared_path = os.path.join(gitdir, b'hg')

        # setup shared repo (if not there)
        try:
            hg.peer(myui, {}, shared_path, create=True)
        except error.RepoError:
            # already exists
            pass

        if not os.path.exists(dirname):
            os.makedirs(dirname)

        local_path = os.path.join(dirname, b'clone')
        kwargs = {}
        hg_path = os.path.join(shared_path, b'.hg')
        if check_version(4, 2):
            kwargs = {'relative': True}
            hg_path = os.path.join(b'..', b'..', b'..', b'.hg')
        if not os.path.exists(local_path):
            hg.share(myui, shared_path, local_path, update=False, **kwargs)
        else:
            # make sure the shared path is always up-to-date
            util.writefile(os.path.join(local_path, b'.hg', b'sharedpath'), hg_path)

        repo = hg.repository(myui, local_path)
        try:
            peer = hg.peer(repo.ui, {}, url)
        except:
            die('Repository error')

        # pull everything reachable from any remote branch head
        branchmap = peer.branchmap()
        heads = []
        for branch, branch_heads in compat.iteritems(branchmap):
            heads.extend(branch_heads)

        if check_version(3, 0):
            from mercurial import exchange
            exchange.pull(repo, peer, heads=heads, force=True)
        else:
            repo.pull(peer, heads=heads, force=True)

        updatebookmarks(repo, peer)

    return repo, branchmap
|
|
|
|
def rev_to_mark(rev):
    # mark for a changectx, keyed by its hex hash
    return marks.from_rev(rev.hex())
|
|
|
|
def mark_to_rev(mark):
    # hg hex revision recorded for a git mark
    return marks.to_rev(mark)
|
|
|
|
# Walk backwards from revision b to determine which need importing
# For repos with many heads (and loooots of tagging) this walk should
# end as soon as possible, so use all the known revisions as negative.
def revwalk(repo, name, b):
    """Return the ascending list of revision numbers reachable from
    head *b* that are not yet marked (i.e. not yet exported)."""
    positive = []
    pending = set()
    if not marks.is_marked(b.hex()):
        pending.add(b.rev())
    # fix: integer division — '/' yields a float on Python 3, making the
    # progress modulo test below operate on floats
    interval = b.rev() // 10
    interval = interval if interval > 1000 else 1000
    pfunc = repo.changelog.parentrevs
    for cur in range(b.rev(), -1, -1):
        if not pending:
            break

        if cur in pending:
            positive.append(cur)
            pending.remove(cur)
            # queue unmarked parents for the continued walk
            parents = [p for p in pfunc(cur) if p >= 0]
            for p in parents:
                if not marks.is_marked(repo[p].hex()):
                    pending.add(p)

        if cur % interval == 0:
            puts(b"progress revision walk '%s' (%d/%d)" % (name, (b.rev() - cur), b.rev()))

    positive.reverse()
    return positive
|
|
|
|
def export_ref(repo, name, kind, head):
    """Emit fast-import commands for all not-yet-exported ancestors of
    *head*, then reset the private ref '<prefix>/<kind>/<name>'."""
    ename = b'%s/%s' % (kind, name)
    revs = revwalk(repo, ename, head)
    total = len(revs)

    for progress, rev in enumerate(revs):

        c = repo[rev]
        node = c.node()

        (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(node)
        rev_branch = extra[b'branch']

        author = b"%s %d %s" % (fixup_user(user), time, gittz(tz))
        if b'committer' in extra:
            try:
                cuser, ctime, ctz = extra[b'committer'].rsplit(b' ', 2)
                committer = b"%s %s %s" % (fixup_user(cuser), ctime, gittz(int(ctz)))
            except ValueError:
                # committer extra without a timestamp; reuse author's
                cuser = extra[b'committer']
                committer = b"%s %d %s" % (fixup_user(cuser), time, gittz(tz))
        else:
            committer = author

        parents = [repo[p] for p in repo.changelog.parentrevs(rev) if p >= 0]

        if len(parents) == 0:
            # root commit: everything in the manifest is new
            modified = list(c.manifest().keys())
            removed = []
        else:
            modified, removed = get_filechanges(repo, c, parents[0])

        desc += b'\n'

        if mode == 'hg':
            # hg-git compatibility: encode hg metadata in a --HG-- trailer
            extra_msg = b''

            if rev_branch != b'default':
                extra_msg += b'branch : %s\n' % rev_branch

            renames = []
            for f in c.files():
                if f not in c.manifest():
                    continue
                rename = c.filectx(f).renamed()
                if rename:
                    renames.append((rename[0], f))

            # NOTE no longer used in hg-git, a HG:rename extra header is used
            for e in renames:
                extra_msg += b"rename : %s => %s\n" % e

            for key, value in compat.iteritems(extra):
                if key in (b'author', b'committer', b'encoding', b'message', b'branch', b'hg-git', b'transplant_source'):
                    continue
                elif key == b'hg-git-rename-source' and value == b'git':
                    # extra data that hg-git might put there unconditionally
                    # or that we put in there to be compatible
                    continue
                else:
                    extra_msg += b"extra : %s : %s\n" % (key, compat.urlquote(value))

            if extra_msg:
                desc += b'\n--HG--\n' + extra_msg

        if len(parents) == 0:
            # start the ref fresh for a root commit
            puts(b'reset %s/%s' % (prefix, ename))

        # emit blobs first; collect (mode, mark, path) for the M lines
        modified_final = [export_file(c, fname) for fname in modified]

        puts(b"commit %s/%s" % (prefix, ename))
        puts(b"mark :%d" % (marks.get_mark(c.hex())))
        puts(b"author %s" % (author))
        puts(b"committer %s" % (committer))
        puts(b"data %d" % (len(desc)))
        puts(desc)

        if len(parents) > 0:
            puts(b"from :%u" % (rev_to_mark(parents[0])))
            if len(parents) > 1:
                puts(b"merge :%u" % (rev_to_mark(parents[1])))

        for f in removed:
            puts(b"D %s" % (fix_file_path(f)))
        for f in modified_final:
            puts(b"M %s :%u %s" % f)
        puts()

        if (progress % 100 == 0):
            puts(b"progress revision %d '%s' (%d/%d)" % (rev, name, progress, total))

    # make sure the ref is updated
    puts(b"reset %s/%s" % (prefix, ename))
    puts(b"from :%u" % rev_to_mark(head))
    puts()

    # attach hg revision info as git notes for newly exported revisions
    pending_revs = set(revs) - notes
    if pending_revs:
        desc = b"Notes for %s\n" % (name)
        update_notes([repo[rev].hex() for rev in pending_revs], desc, False)
        notes.update(pending_revs)
|
|
|
|
def export_tag(repo, tag):
    # export the hg changeset a tag points at under the tags namespace
    export_ref(repo, tag, b'tags', scmutil.revsingle(repo, hgref(tag)))
|
|
|
|
def export_bookmark(repo, bmark):
    # export a bookmark's head under the bookmarks namespace
    head = bmarks[hgref(bmark)]
    export_ref(repo, bmark, b'bookmarks', head)
|
|
|
|
def export_branch(repo, branch):
    # export a named branch's (single) tip under the branches namespace
    tip = get_branch_tip(repo, branch)
    head = repo[tip]
    export_ref(repo, branch, b'branches', head)
|
|
|
|
def export_head(repo):
    # export the (name, node) pair previously recorded by list_head()
    export_ref(repo, g_head[0], b'bookmarks', g_head[1])
|
|
|
|
def do_capabilities(parser):
    """Answer the remote-helper 'capabilities' command."""
    puts(b"import")
    # advertise either native push or fast-export based pushing
    if capability_push:
        puts(b"push")
    else:
        puts(b"export")
    puts(b"refspec refs/heads/branches/*:%s/branches/*" % prefix)
    puts(b"refspec refs/heads/*:%s/bookmarks/*" % prefix)
    puts(b"refspec refs/tags/*:%s/tags/*" % prefix)

    path = os.path.join(marksdir, b'marks-git')

    # only offer import-marks once a marks file exists
    if os.path.exists(path):
        puts(b"*import-marks %s" % path)
    puts(b"*export-marks %s" % path)
    puts(b"option")
    # nothing really depends on the private refs being up to date
    # (export is limited anyway by the current git marks)
    # and they are not always updated correctly (dry-run, bookmark delete, ...)
    # (might resolve some dry-run breakage also)
    puts(b"no-private-update")

    puts()
|
|
|
|
def branch_tip(branch):
    # last (tip-most) head recorded for a named branch
    return branches[branch][-1]
|
|
|
|
def get_branch_tip(repo, branch):
    """Return the single head of *branch*, or its tip (with a warning)
    when the branch has multiple heads; None for an unknown branch."""
    heads = branches.get(hgref(branch), None)
    if not heads:
        return None

    # verify there's only one head
    if (len(heads) > 1):
        warn(b"Branch '%s' has more than one head, consider merging" % branch)
        return branch_tip(hgref(branch))

    return heads[0]
|
|
|
|
def list_head(repo, cur):
    """Emit the HEAD symref, pointing at a (possibly fake) bookmark on
    the tip of the 'default' branch; records it in g_head/fake_bmark."""
    global g_head, fake_bmark

    if b'default' not in branches:
        # empty repo
        return

    node = repo[branch_tip(b'default')]
    # prefer 'master' unless a real bookmark of that name already exists
    head = b'master' if b'master' not in bmarks else b'default'
    fake_bmark = head
    bmarks[head] = node

    head = gitref(head)
    puts(b"@refs/heads/%s HEAD" % head)
    g_head = (head, node)
|
|
|
|
def do_list(parser, branchmap):
    """Answer the remote-helper 'list' command: print all refs
    (bookmarks, optionally named branches, and tags) with placeholder
    hashes, honoring remote-hg.ignore-name patterns."""
    repo = parser.repo
    for bmark, node in compat.iteritems(bookmarks.listbookmarks(repo)):
        bmarks[bmark] = repo[node]

    cur = repo.dirstate.branch()

    for branch, heads in compat.iteritems(branchmap):
        # only open heads
        try:
            heads = [h for h in heads if b'close' not in repo.changelog.read(h)[5]]
            if heads:
                branches[branch] = heads
        except error.LookupError:
            branches[branch] = heads

    list_head(repo, cur)

    ignore_ref = get_config('remote-hg.ignore-name', True)
    ignore_re = []
    for exp in ignore_ref.splitlines():
        if exp:
            # fix: catch only regex compilation errors, not everything
            try:
                ignore_re.append(re.compile(exp.strip()))
            except re.error:
                warn(b"Invalid regular expression '%s'" % (exp))

    def ignore(kind, name):
        # True (with a warning) if *name* matches an ignore pattern;
        # kind must be bytes since warn() formats a bytes template
        for r in ignore_re:
            if r.search(name):
                warn(b"Ignoring matched %s %s" % (kind, name))
                return True
        return False

    # for export command a ref's old_sha1 is taken from private namespace ref
    # for push command a fake one is provided
    # that avoids having the ref status reported as a new branch/tag
    # (though it will be marked as FETCH_FIRST prior to push,
    # but that's ok as we will provide proper status)
    for_push = (parser.line.find(b'for-push') >= 0)
    sha1 = b'f' * 40 if (capability_push and for_push) else b'?'

    if track_branches:
        for branch in branches:
            if not ignore(b'branch', branch):
                puts(b"%s refs/heads/branches/%s" % (sha1, gitref(branch)))

    for bmark in bmarks:
        if bmarks[bmark].hex() == b'0' * 40:
            warn(b"Ignoring invalid bookmark '%s'" % bmark)
        # fix: pass bytes (was str 'bookmark', which made warn() raise
        # TypeError under Python 3 whenever a bookmark matched)
        elif not ignore(b'bookmark', bmark):
            puts(b"%s refs/heads/%s" % (sha1, gitref(bmark)))

    for tag, node in repo.tagslist():
        if tag == b'tip':
            continue
        if not ignore(b'tag', tag):
            puts(b"%s refs/tags/%s" % (sha1, gitref(tag)))

    puts()
|
|
|
|
def do_import(parser):
    """Answer the remote-helper 'import' commands: stream fast-import
    data for every requested ref, then emit the final 'done'."""
    repo = parser.repo

    path = os.path.join(marksdir, b'marks-git')

    puts(b"feature done")
    if os.path.exists(path):
        puts(b"feature import-marks=%s" % path)
    puts(b"feature export-marks=%s" % path)
    puts(b"feature force")
    compat.stdout.flush()

    # force utf-8 while exporting; restored afterwards
    tmp = encoding.encoding
    encoding.encoding = b'utf-8'

    # lets get all the import lines
    while parser.check(b'import'):
        ref = parser[1]

        if (ref == b'HEAD'):
            export_head(repo)
        elif ref.startswith(b'refs/heads/branches/'):
            branch = ref[len(b'refs/heads/branches/'):]
            export_branch(repo, branch)
        elif ref.startswith(b'refs/heads/'):
            bmark = ref[len(b'refs/heads/'):]
            export_bookmark(repo, bmark)
        elif ref.startswith(b'refs/tags/'):
            tag = ref[len(b'refs/tags/'):]
            export_tag(repo, tag)

        parser.next()

    encoding.encoding = tmp

    puts(b'done')
|
|
|
|
def parse_blob(parser):
    """Consume a fast-export 'blob' command, stashing its payload in the
    global blob_marks by mark number."""
    parser.next()
    mark = parser.get_mark()
    parser.next()
    data = parser.get_data()
    blob_marks[mark] = data
    parser.next()
|
|
|
|
def get_file_metadata(repo, p1, files):
    """Fill in 'mode' and 'data' for renamed/copied entries that carry
    no filemodify line, using the old file from parent revision *p1*."""
    for name in files:
        info = files[name]
        if 'rename' not in info or 'mode' in info:
            continue
        source = repo[p1][info['rename']]
        info['mode'] = source.flags()
        info['data'] = source.data()
|
|
|
|
def get_merge_files(repo, p1, p2, files):
    """Ensure files touched in first parent *p1* are present in *files*
    so Mercurial records them on a merge commit."""
    for e in repo[p1].files():
        if e not in files:
            if e not in repo[p1].manifest():
                # not in p1's manifest (e.g. deleted there); skip
                continue
            f = { 'ctx': repo[p1][e] }
            files[e] = f
|
|
|
|
def split_line_pathnames(line):
    """Split a fast-export R/C file command line into (op, old, new),
    honoring C-style quoting of the first path."""
    if line[2] != compat.char(b'"'):
        # unquoted: plain space-separated fields
        return line.split(b' ', 2)
    else:
        # quoted: scan for the closing, unescaped double quote
        p = 3
        while p >= 0:
            if line[p] == compat.char(b'"') and line[p - 1] != compat.char(b'\\'):
                return compat.char(line[0]), line[2:p+1], line[p+2:]
            p = line.find(b'"', p + 1)
        # hm, should not happen
        die(b'Malformed file command: %s' % (line))
|
|
|
|
def c_style_unescape(string):
    """Undo C-style quoting produced by git fast-export, if present."""
    if string[0] == string[-1] == compat.char(b'"'):
        return compat.unescape(string[1:-1])
    return string
|
|
|
|
# sigh; handle context creation for various (incompatible) versions
def make_memfilectx(repo, memctx, path, data, is_link, is_exec, *args):
    """Create a Mercurial memfilectx using the constructor signature
    appropriate for the running Mercurial version."""
    if check_version(4, 5):
        return context.memfilectx(repo, memctx, path, data, is_link, is_exec, *args)
    if check_version(3, 1):
        return context.memfilectx(repo, path, data, is_link, is_exec, *args)
    else:
        return context.memfilectx(path, data, is_link, is_exec, *args)
|
|
|
|
def parse_commit(parser):
|
|
from_mark = merge_mark = None
|
|
|
|
remoteref = parser.context.remoteref
|
|
ref = parser[1] if not remoteref else remoteref
|
|
parser.next()
|
|
|
|
commit_mark = parser.get_mark()
|
|
parser.next()
|
|
author = parser.get_author()
|
|
parser.next()
|
|
committer = parser.get_author()
|
|
parser.next()
|
|
data = parser.get_data()
|
|
parser.next()
|
|
if parser.check(b'from'):
|
|
from_mark = parser.get_mark()
|
|
parser.next()
|
|
if parser.check(b'merge'):
|
|
merge_mark = parser.get_mark()
|
|
parser.next()
|
|
if parser.check(b'merge'):
|
|
die('octopus merges are not supported yet')
|
|
|
|
# fast-export adds an extra newline
|
|
if data[-1] == compat.char(b'\n'):
|
|
data = data[:-1]
|
|
|
|
files = {}
|
|
|
|
for line in parser:
|
|
if parser.check(b'M'):
|
|
t, m, mark_ref, path = line.split(b' ', 3)
|
|
if m == b'160000':
|
|
# This is a submodule -- there is no reasonable
|
|
# way to import it.
|
|
blob_data = b"[git-remote-hg: skipped import of submodule at %s]" % mark_ref
|
|
else:
|
|
mark = int(mark_ref[1:])
|
|
blob_data = blob_marks[mark]
|
|
f = { 'mode': hgmode(m), 'data': blob_data }
|
|
elif parser.check(b'D'):
|
|
t, path = line.split(b' ', 1)
|
|
f = { 'deleted': True }
|
|
elif parser.check(b'R'):
|
|
t, old, path = split_line_pathnames(line)
|
|
old = c_style_unescape(old)
|
|
f = { 'rename': old }
|
|
# also mark old deleted
|
|
files[old] = { 'deleted': True }
|
|
elif parser.check(b'C'):
|
|
t, old, path = split_line_pathnames(line)
|
|
f = { 'rename': c_style_unescape(old) }
|
|
else:
|
|
die(b'Unknown file command: %s' % line)
|
|
path = c_style_unescape(path)
|
|
files[path] = files.get(path, {})
|
|
files[path].update(f)
|
|
|
|
# only export the commits if we are on an internal proxy repo
|
|
if dry_run and not peer:
|
|
parsed_refs[ref] = None
|
|
return
|
|
|
|
# check if this is an hg commit we have in some other repo
|
|
gitmarks = parser.context.gitmarks
|
|
if gitmarks:
|
|
gitcommit = gitmarks.to_rev(commit_mark)
|
|
hgrev = get_rev_hg(gitcommit)
|
|
if hgrev:
|
|
hghelper = parser.context.hghelper
|
|
if not hghelper:
|
|
puts(b"error %s rejected not pushing hg based commit %s" % (ref, gitcommit))
|
|
raise UserWarning("check-hg-commits")
|
|
# must be in some local repo
|
|
# find it and push it to the target local repo
|
|
# (rather than making a commit into it)
|
|
# probably not already in target repo, but let's make sure
|
|
if hgrev not in parser.repo:
|
|
srepo = hghelper.githgrepo.find_hg_repo(hgrev)
|
|
if not srepo:
|
|
# pretty bad, if identified as hg revision, we should have it somewhere
|
|
# but is possible if the originating repo has been removed now
|
|
# warn elaborately and fail given the current settings
|
|
description = b"\n" \
|
|
b"commit %s corresponds \nto hg revision %s,\n" \
|
|
b"but could not find latter in any fetched hg repo.\n" \
|
|
b"Please resolve the inconsistency or disable pushing hg commits" \
|
|
% (gitcommit, hgrev)
|
|
die(description)
|
|
warn(b'Pushing hg changeset %s for %s' % (hgrev, gitcommit))
|
|
# target is local repo so should have a root
|
|
# force push since otherwise forcibly commit anyway
|
|
# (and needed for multiple head case etc)
|
|
push(srepo, hg.peer(srepo.ui, {}, parser.repo.root), [hgbin(hgrev)], True)
|
|
else:
|
|
# could already be present, particularly in shared proxy repo
|
|
warn(b'Using hg changeset %s for %s' % (hgrev, gitcommit))
|
|
# track mark and are done here
|
|
parsed_refs[ref] = hgrev
|
|
marks.new_mark(hgrev, commit_mark)
|
|
return
|
|
|
|
def getfilectx(repo, memctx, f):
|
|
of = files[f]
|
|
if 'deleted' in of:
|
|
if check_version(3, 2):
|
|
return None
|
|
else:
|
|
raise IOError
|
|
if 'ctx' in of:
|
|
if mode == 'hg' and check_version(3, 2):
|
|
# force the creation of a new filelog
|
|
ctx = of['ctx']
|
|
is_exec = ctx.isexec()
|
|
is_link = ctx.islink()
|
|
return make_memfilectx(repo, memctx, f, ctx.data(), is_link, is_exec)
|
|
else:
|
|
return of['ctx']
|
|
is_exec = of['mode'] == b'x'
|
|
is_link = of['mode'] == b'l'
|
|
rename = of.get('rename', None)
|
|
return make_memfilectx(repo, memctx, f, of['data'], is_link, is_exec, rename)
|
|
|
|
repo = parser.repo
|
|
|
|
user, date, tz = author
|
|
extra = {}
|
|
|
|
if committer != author:
|
|
extra[b'committer'] = b"%s %u %u" % committer
|
|
|
|
if from_mark:
|
|
p1 = mark_to_rev(from_mark)
|
|
else:
|
|
p1 = b'0' * 40
|
|
|
|
if merge_mark:
|
|
p2 = mark_to_rev(merge_mark)
|
|
else:
|
|
p2 = b'0' * 40
|
|
|
|
#
|
|
# If files changed from any of the parents, hg wants to know, but in git if
|
|
# nothing changed from the first parent, nothing changed.
|
|
#
|
|
if merge_mark:
|
|
get_merge_files(repo, p1, p2, files)
|
|
|
|
# need to obtain file metadata for copied and renamed files that have
|
|
# no filemodify line; let's get that from the old file in parent revision
|
|
if from_mark:
|
|
get_file_metadata(repo, p1, files)
|
|
|
|
# Check if the ref is supposed to be a named branch
|
|
if ref.startswith(b'refs/heads/branches/'):
|
|
branch = ref[len(b'refs/heads/branches/'):]
|
|
extra[b'branch'] = hgref(branch)
|
|
|
|
if mode == 'hg':
|
|
# add some extra that hg-git adds (almost) unconditionally
|
|
# see also https://foss.heptapod.net/mercurial/hg-git/-/merge_requests/211
|
|
# NOTE it could be changed to another value below
|
|
# actually, it is *almost* unconditionally, and only done if the commit
|
|
# is deduced to originate in git. However, the latter is based on
|
|
# presence/absence of HG markers in commit "extra headers".
|
|
# The latter can not be handled here, and so this can not be correctly
|
|
# reproduced.
|
|
# extra[b'hg-git-rename-source'] = b'git'
|
|
i = data.find(b'\n--HG--\n')
|
|
if i >= 0:
|
|
tmp = data[i + len(b'\n--HG--\n'):].strip()
|
|
for k, v in [e.split(b' : ', 1) for e in tmp.split(b'\n')]:
|
|
# NOTE no longer used in hg-git, a HG:rename extra header is used
|
|
if k == b'rename':
|
|
old, new = v.split(b' => ', 1)
|
|
files[new]['rename'] = old
|
|
elif k == b'branch':
|
|
extra[k] = v
|
|
elif k == b'extra':
|
|
ek, ev = v.split(b' : ', 1)
|
|
extra[ek] = compat.urlunquote(ev)
|
|
data = data[:i]
|
|
|
|
ctx = context.memctx(repo, (p1, p2), data,
|
|
list(files.keys()), getfilectx,
|
|
user, (date, tz), extra)
|
|
|
|
tmp = encoding.encoding
|
|
encoding.encoding = b'utf-8'
|
|
|
|
node = hghex(repo.commitctx(ctx))
|
|
|
|
encoding.encoding = tmp
|
|
|
|
parsed_refs[ref] = node
|
|
marks.new_mark(node, commit_mark)
|
|
parser.context.revs.append(node)
|
|
|
|
def parse_reset(parser):
    """Handle a fast-export 'reset' command, recording the ref's target
    revision (or None for an unknown mark) in parsed_refs."""
    ctx_ref = parser.context.remoteref
    ref = ctx_ref if ctx_ref else parser[1]
    parser.next()
    # fast-export may follow 'reset' immediately with a 'commit'; hand it off
    if parser.check(b'commit'):
        parse_commit(parser)
        return
    if not parser.check(b'from'):
        # bare reset with no 'from' line: nothing to record
        return
    source_mark = parser.get_mark()
    parser.next()

    try:
        target = mark_to_rev(source_mark)
    except KeyError:
        target = None
    parsed_refs[ref] = target
|
|
|
|
def parse_tag(parser):
    """Handle a fast-export 'tag' command: record the target revision
    under refs/tags/<name> and stash the tagger/message for later."""
    name = parser[1]
    parser.next()
    source_mark = parser.get_mark()
    parser.next()
    tagger = parser.get_author()
    parser.next()
    message = parser.get_data()
    parser.next()

    try:
        target = mark_to_rev(source_mark)
    except KeyError:
        target = None
    parsed_refs[b'refs/tags/' + name] = target

    parsed_tags[name] = (tagger, message)
|
|
|
|
def write_tag(repo, tag, node, msg, author):
    """Commit an update to .hgtags on the branch of 'node', adding 'tag'.

    'node' is the (hex) changeset being tagged, 'msg' the commit message,
    'author' an optional (user, date, tz) tuple.
    Returns (tagnode, branch): the node of the new tag commit and the
    branch it was committed on.
    """
    branch = repo[node].branch()
    tip = branch_tip(branch)
    tip = repo[tip]

    def getfilectx(repo, memctx, f):
        # append the new tag line to the current .hgtags content (if any)
        try:
            fctx = tip.filectx(f)
            data = fctx.data()
        except error.LookupError:
            # .hgtags does not exist yet on this branch
            data = b""
        if data and not data.endswith(b'\n'):
            data += b'\n'
        content = data + b"%s %s\n" % (node, tag)
        return make_memfilectx(repo, memctx, f, content, False, False, None)

    p1 = tip.hex()
    p2 = b'0' * 40
    if author:
        user, date, tz = author
        date_tz = (date, tz)
    else:
        # no tagger supplied: use the git committer identity,
        # falling back to the Mercurial username
        cmd = ['git', 'var', 'GIT_COMMITTER_IDENT']
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output, _ = process.communicate()
        m = re.match(b'^.* <.*>', output)
        if m:
            user = m.group(0)
        else:
            user = repo.ui.username()
        date_tz = None

    ctx = context.memctx(repo, (p1, p2), msg,
            [b'.hgtags'], getfilectx,
            user, date_tz, {b'branch': branch})

    # commit with utf-8 encoding, since git metadata is utf-8
    tmp = encoding.encoding
    encoding.encoding = b'utf-8'

    tagnode = repo.commitctx(ctx)

    encoding.encoding = tmp

    return (tagnode, branch)
|
|
|
|
def checkheads_bmark(repo, ref, ctx, force):
    """Verify that moving the bookmark named by 'ref' to 'ctx' is a
    fast-forward (or forced); emit the protocol reply and return True
    when the update may proceed."""
    bmark = ref[len(b'refs/heads/'):]
    if bmark not in bmarks:
        # new bookmark: always allowed
        return True

    ctx_old = bmarks[bmark]

    if ctx.rev() is None:
        puts(b"error %s unknown" % ref)
        return False

    # the ancestry-check API was replaced around Mercurial 4.7
    cl = repo.changelog
    if hasattr(cl, 'isancestorrev'):
        isancestor = cl.isancestorrev
    else:
        isancestor = cl.descendant
    if isancestor(ctx_old.rev(), ctx.rev()):
        return True

    # non-fast-forward move
    if not force:
        puts(b"error %s non-fast forward" % ref)
        return False
    puts(b"ok %s forced update" % ref)
    return True
|
|
|
|
def checkheads(repo, remote, p_revs, force):
    """Verify that pushing the revisions in p_revs to 'remote' creates no
    new heads on existing branches (unless forced).

    p_revs maps binary changeset nodes to the git refs being pushed.
    Emits 'ok'/'error' protocol lines for affected refs and returns True
    when the push may proceed.
    """

    remotemap = remote.branchmap()
    if not remotemap:
        # empty repo
        return True

    # branch name -> list of candidate new head revisions
    new = {}
    ret = True

    for node, ref in compat.iteritems(p_revs):
        ctx = repo[node]
        branch = ctx.branch()
        if branch not in remotemap:
            # new branch
            continue
        if not ref.startswith(b'refs/heads/branches'):
            if ref.startswith(b'refs/heads/'):
                # bookmark refs get their own fast-forward check
                if not checkheads_bmark(repo, ref, ctx, force):
                    ret = False

            # only check branches
            continue
        new.setdefault(branch, []).append(ctx.rev())

    for branch, heads in compat.iteritems(new):
        old = [repo.changelog.rev(x) for x in remotemap[branch]]
        for rev in heads:
            # ancestors() signature changed in Mercurial 2.3
            if check_version(2, 3):
                ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
            else:
                ancestors = repo.changelog.ancestors(rev)
            found = False

            # the branch stays single-headed iff some existing remote head
            # is an ancestor of the pushed revision
            for x in old:
                if x in ancestors:
                    found = True
                    break

            if found:
                continue

            node = repo.changelog.node(rev)
            ref = p_revs[node]
            if force:
                puts(b"ok %s forced update" % ref)
            else:
                puts(b"error %s non-fast forward" % ref)
                ret = False

    return ret
|
|
|
|
def push_unsafe(repo, remote, p_revs, force):
    """Push the changesets reachable from p_revs to 'remote' without
    taking any lock; the caller is responsible for locking when needed.

    Returns the peer's push result, or None when there is nothing to push.
    """

    fci = discovery.findcommonincoming
    # the ancestorsof parameter appeared around Mercurial 4.5
    if check_version(4, 5):
        commoninc = fci(repo, remote, force=force, ancestorsof=list(p_revs))
    else:
        commoninc = fci(repo, remote, force=force)
    common, _, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(repo, remote, onlyheads=list(p_revs), commoninc=commoninc, force=force)

    # nice to know about this rather than assume a bogus error
    # also, some remote peertypes might otherwise be surprised further down
    if not outgoing.missing:
        return None

    # the changegroup API changed repeatedly across Mercurial versions
    if check_version(4, 0):
        if hasattr(changegroup, 'getlocalchangegroup'):
            cg = changegroup.getlocalchangegroup(repo, b'push', outgoing)
        else:
            # as of about version 4.4
            cg = changegroup.makechangegroup(repo, outgoing, b'01', b'push')
    elif check_version(3, 2):
        cg = changegroup.getchangegroup(repo, 'push', heads=list(p_revs), common=common)
    elif check_version(3, 0):
        cg = changegroup.getbundle(repo, 'push', heads=list(p_revs), common=common)
    else:
        cg = repo.getbundle('push', heads=list(p_revs), common=common)

    unbundle = remote.capable(b'unbundle')
    if unbundle:
        if force:
            # tell the remote to skip its own head checks
            remoteheads = [b'force']
        ret = remote.unbundle(cg, remoteheads, b'push')
    else:
        ret = remote.addchangegroup(cg, b'push', repo.url())

    return ret
|
|
|
|
def push(repo, remote, p_revs, force):
    """Push the revisions in p_revs to 'remote', locking the peer when it
    lacks the 'unbundle' capability.

    Returns push_unsafe()'s result, or None when there is nothing to push.
    """
    if hasattr(remote, 'canpush') and not remote.canpush():
        puts(b"error cannot push")

    if not p_revs:
        # nothing to do
        return

    # peers without 'unbundle' must be locked around the push
    lock = None if remote.capable(b'unbundle') else remote.lock()
    try:
        result = push_unsafe(repo, remote, p_revs, force)
    finally:
        if lock is not None:
            lock.release()

    return result
|
|
|
|
def bookmark_is_fake(bmark, real_bmarks):
    """A bookmark is 'fake' when it is the synthesized one, or a 'master'
    that does not actually exist in the repository's bookmarks."""
    if bmark == fake_bmark:
        return True
    return bmark == b'master' and bmark not in real_bmarks
|
|
|
|
def do_export(parser):
    """Handle the remote-helper 'export' command: run the hg push and
    terminate the reply with a blank line."""
    do_push_hg(parser)
    puts()
|
|
|
|
def do_push_hg(parser):
    """Process a git-fast-export stream and create/push the corresponding
    Mercurial changesets, bookmarks and tags.

    Populates the global parsed_refs/parsed_tags while parsing, then
    pushes to the peer (if any) and updates bookmarks, emitting the
    remote-helper 'ok'/'error' protocol lines along the way.
    Returns True on success, False on failure (None on early exits).
    """
    global parsed_refs, parsed_tags
    p_bmarks = []
    p_revs = {}
    ok_refs = []

    parsed_refs = {}
    parsed_tags = {}

    parser.next()

    remoteref = parser.context.remoteref
    if remoteref and not parser.line:
        # if remoteref is in past exported
        # git-fast-export might not produce anything at all
        # that is ok'ish, we will determine parsed_ref another way
        localref = parser.context.localref
        hgrev = get_rev_hg(localref)
        if not hgrev:
            # maybe the notes are not updated
            # happens only on fetch for now ... let's ask for that
            # NOTE protocol output is bytes, so the format string must be too
            puts(b"error %s fetch first" % remoteref)
            return False
        parsed_refs[remoteref] = hgrev
        # now make parser happy
        parser.line = b'done'

    for line in parser.each_block(b'done'):
        if parser.check(b'blob'):
            parse_blob(parser)
        elif parser.check(b'commit'):
            parse_commit(parser)
        elif parser.check(b'reset'):
            parse_reset(parser)
        elif parser.check(b'tag'):
            parse_tag(parser)
        elif parser.check(b'feature'):
            pass
        else:
            die(b'unhandled export command: %s' % line)

    for ref, node in compat.iteritems(parsed_refs):
        bnode = hgbin(node) if node else None
        if ref.startswith(b'refs/heads/branches'):
            branch = ref[len(b'refs/heads/branches/'):]
            if branch in branches and bnode in branches[branch]:
                # up to date
                puts(b"ok %s up to date" % ref)
                continue

            p_revs[bnode] = ref
            ok_refs.append(ref)
        elif ref.startswith(b'refs/heads/'):
            bmark = ref[len(b'refs/heads/'):]
            new = node
            old = bmarks[bmark].hex() if bmark in bmarks else b''

            if old == new:
                puts(b"ok %s up to date" % ref)
                continue

            ok_refs.append(ref)
            if not bookmark_is_fake(bmark, parser.repo._bookmarks):
                p_bmarks.append((ref, bmark, old, new))

            p_revs[bnode] = ref
        elif ref.startswith(b'refs/tags/'):
            if dry_run:
                ok_refs.append(ref)
                continue
            tag = ref[len(b'refs/tags/'):]
            tag = hgref(tag)
            author, msg = parsed_tags.get(tag, (None, None))
            if mode == 'git':
                if not msg:
                    msg = b'Added tag %s for changeset %s' % (tag, node[:12])
                tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
                p_revs[tagnode] = b'refs/heads/branches/' + gitref(branch)
            else:
                # hg-git-compat mode: record the tag in localtags instead
                vfs = parser.repo.vfs if hasattr(parser.repo, 'vfs') \
                        else parser.repo.opener
                prevtags = b''
                try:
                    fp = vfs(b'localtags', b'r+')
                except IOError:
                    fp = vfs(b'localtags', b'a')
                else:
                    prevtags = fp.read()
                    # ensure at end
                    fp.seek(0, io.SEEK_END)
                if prevtags and not prevtags.endswith(b'\n'):
                    fp.write(b'\n')
                fp.write(b'%s %s\n' % (node, tag))
                fp.close()
                p_revs[bnode] = ref
            ok_refs.append(ref)
        else:
            # transport-helper/fast-export bugs
            continue

    if dry_run:
        if not peer or checkheads(parser.repo, peer, p_revs, force_push):
            for ref in ok_refs:
                puts(b"ok %s" % ref)
        return

    success = True
    if peer:
        if not checkheads(parser.repo, peer, p_revs, force_push):
            return False
        ret = push(parser.repo, peer, p_revs, force_push)
        # None ok: nothing to push
        if ret != None and not ret:
            # do not update bookmarks
            return

        # update remote bookmarks
        remote_bmarks = peer.listkeys(b'bookmarks')
        for ref, bmark, old, new in p_bmarks:
            if force_push:
                old = remote_bmarks.get(bmark, b'')
            if not peer.pushkey(b'bookmarks', bmark, old, new):
                success = False
                puts(b"error %s" % ref)
                ok_refs.remove(ref)
    else:
        # update local bookmarks
        for ref, bmark, old, new in p_bmarks:
            if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
                success = False
                puts(b"error %s" % ref)
                ok_refs.remove(ref)

    # update rest of the refs
    for ref in ok_refs:
        puts(b"ok %s" % ref)

    return success
|
|
|
|
def delete_bookmark(parser, ref):
    """Delete the bookmark named by 'ref' locally, propagate the deletion
    to the peer when there is one, and drop the matching private ref.

    Returns True when the bookmark was deleted everywhere it should be,
    False otherwise (fake or unknown bookmark, or pushkey failure).
    """
    bmark = ref[len(b'refs/heads/'):]
    if bmark == fake_bmark:
        # the synthesized bookmark cannot be deleted
        return False
    # delete local (proxy or target)
    old = bmarks[bmark].hex() if bmark in bmarks else b''
    if not old:
        # no such bookmark
        return False
    # NOTE the original re-checked 'if old:' here, but the guard above
    # already guarantees it is non-empty
    ok = bookmarks.pushbookmark(parser.repo, bmark, old, b'')
    # propagate to peer if appropriate
    if ok and peer:
        remote_bmarks = peer.listkeys(b'bookmarks')
        old = remote_bmarks.get(bmark, b'')
        ok = peer.pushkey(b'bookmarks', bmark, old, b'')
    # delete private ref
    if ok:
        pbookmark = b'%s/heads/%s' % (prefix, bmark)
        subprocess.call(['git', 'update-ref', '-d', pbookmark])
    return ok
|
|
|
|
def do_push_refspec(parser, refspec, revs):
    """Handle one 'push' refspec: delete a bookmark, or run
    git-fast-export on the source ref and feed its stream to do_push_hg().

    'revs' is filled in place with the Mercurial revisions created, so
    the caller can update the git notes afterwards.
    """
    global force_push

    # a leading '+' means a forced push
    force = (refspec[0] == compat.char(b'+'))
    refs = refspec.strip(b'+').split(b':')
    # check for delete
    if (not refs[0]) and refs[1].startswith(b'refs/heads') and \
            not refs[1].startswith(b'refs/heads/branches'):
        if not dry_run and not delete_bookmark(parser, refs[1]):
            puts(b"error %s could not delete "% (refs[1]))
        else:
            puts(b"ok %s" % (refs[1]))
        return
    # sanity check on remote ref
    if not (refs[1].startswith(b'refs/heads') or refs[1].startswith(b'refs/tags')):
        puts(b"error %s refspec not supported " % refs[1])
        return
    ctx = ParserContext()
    if refs[0] != refs[1]:
        # would work and tag as requested, but pushing to a hg permanent branch
        # based on a rename rather than a git branch is probably not a good idea
        if refs[1].startswith(b'refs/heads/branches'):
            puts(b"error %s not allowed for permanent branch" % refs[1])
            return
        ctx.remoteref = refs[1]
        ctx.localref = refs[0]
    # ok, fire up git-fast-export and process it
    cmd = ['git', 'fast-export', '--use-done-feature']
    fast_export_options = get_config('remote-hg.fast-export-options')
    if not fast_export_options:
        # default: detect renames and copies
        fast_export_options = '-M -C'
    cmd.extend(fast_export_options.strip().split())
    marks = os.path.join(marksdir, b'marks-git')
    if os.path.exists(marks):
        cmd.append(b'--import-marks=%s' % marks)
    # optionally reuse existing hg commits in local repos
    check_hg_commits = get_config('remote-hg.check-hg-commits').strip()
    use_hg_commits = check_hg_commits in (b'fail', b'push')
    # no commit of marks if dry_run
    # and only commit if all went ok,
    # otherwise some commits may no longer be exported next time/try around
    tmpmarks = b''
    if use_hg_commits or not dry_run:
        # export marks to a temporary, pid-suffixed file first
        tmpmarks = os.path.join(marksdir, b'marks-git-%d' % (os.getpid()))
        cmd.append(b'--export-marks=%s' % tmpmarks)
    cmd.append(refs[0])
    # a parameter would obviously be nicer here ...
    force_push = force
    ok = False
    tmpfastexport = None
    try:
        if use_hg_commits:
            # we need the mapping from marks to commit
            # so store the output first to a file (and marks get saved also),
            # and then process that file
            tmpfastexport = open(os.path.join(marksdir, b'git-fast-export-%d' % (os.getpid())), 'w+b')
            subprocess.check_call(cmd, stdin=None, stdout=tmpfastexport)
            try:
                # load the companion git-hg-helper script as a module
                import imp
                sys.dont_write_bytecode = True
                ctx.hghelper = imp.load_source('hghelper', \
                        os.path.join(os.path.dirname(__file__), 'git-hg-helper'))
                ctx.hghelper.init_git(gitdir)
                ctx.gitmarks = ctx.hghelper.GitMarks(tmpmarks)
                # let processing know it should not bother pushing if not requested
                if check_hg_commits != b'push':
                    ctx.hghelper = None
            except:
                die("check-hg-commits setup failed; is git-hg-helper also installed?")
            tmpfastexport.seek(0)
            try:
                nparser = Parser(parser.repo, tmpfastexport, ctx)
                ok = do_push_hg(nparser)
            except UserWarning:
                # raised when a hg-based commit is rejected
                ok = False
        else:
            # simply feed fast-export directly to processing
            export = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE)
            nparser = Parser(parser.repo, export.stdout, ctx)
            ok = do_push_hg(nparser)
    finally:
        if tmpmarks and os.path.exists(tmpmarks):
            if ok and not dry_run:
                # the commits made it through, now we can commit
                # sigh ... no atomic rename for existing destination on some platform ...
                # (use unofficial platform check)
                if os.sep != '/':
                    if os.path.exists(marks):
                        os.remove(marks)
                os.rename(tmpmarks, marks)
                revs[:] = nparser.context.revs
            else:
                os.remove(tmpmarks)
        if tmpfastexport and os.path.exists(tmpfastexport.name):
            tmpfastexport.close()
            os.remove(tmpfastexport.name)
|
|
|
|
def update_notes(revs, desc, run_import):
    """Record the hg revision hash of each commit in 'revs' as a git note
    on refs/notes/hg.

    When run_import is true, a dedicated git-fast-import process is
    spawned to apply the notes commit; otherwise the fast-import stream
    is written to compat.stdout (i.e. as part of an ongoing import).
    """
    if not revs:
        return

    if run_import:
        # spin up fast-import
        gitmarks = os.path.join(marksdir, b'marks-git')
        # marks should exist by now
        # no export of marks since notes commits are not relevant
        proc = subprocess.Popen(['git', 'fast-import', '--done', '--quiet',
                b'--import-marks=%s' % gitmarks], stdin=subprocess.PIPE, stdout=sys.stderr)
        # now feed fast-import
        dest = proc.stdin
    else:
        proc = None
        dest = compat.stdout

    note_mark = marks.next_mark()
    ref = b"refs/notes/hg"
    dest.write(b"commit %s\n" % ref)
    dest.write(b"mark :%d\n" % (note_mark))
    dest.write(b"committer remote-hg <> %d %s\n" % (ptime.time(), gittz(ptime.timezone)))
    dest.write(b"data %d\n" % (len(desc)))
    dest.write(desc + b'\n')
    # continue incrementally on current notes branch (whenever possible)
    # to avoid wiping out present content upon fetch of new repo
    # but track along with the previous ref (e.g. as import goes along)
    current_note = rev_parse(ref)
    if current_note and not marks.last_note:
        dest.write(b'from %s^0\n' % (ref))
    elif marks.last_note:
        dest.write(b'from :%u\n' % (marks.last_note))
    for rev in revs:
        # attach the hg hash as a note to the commit of the matching mark
        dest.write(b"N inline :%u\n" % marks.from_rev(rev))
        dest.write(b"data %d\n" % (len(rev)))
        dest.write(rev + b'\n')
    dest.write(b'\n')
    marks.last_note = note_mark

    if proc:
        dest.write(b'done\n')
        dest.flush()
        proc.wait()
        # fail hard if this fails
        # that prevents the marks file from being written
        # so we can have a fresh look with a fetch
        if proc.returncode:
            die('notes update failed with return %d; recover with git fetch' %
                (proc.returncode))
|
|
|
|
def do_push(parser):
    """Handle the remote-helper 'push' command batch: process each
    refspec, then record the resulting hg revisions as git notes."""
    if os.environ.get('GIT_REMOTE_HG_DEBUG_PUSH'):
        # dump the raw command stream and bail out (debugging aid)
        dump = b''
        for line in parser:
            dump += line + b'\n'
        die(b'DEBUG push:\n%s' % (dump))
    revs = []
    for line in parser:
        if parser.check(b'push'):
            localrevs = []
            # NOTE slice off the exact b'push ' prefix; lstrip(b'push ')
            # strips a *set* of characters and could eat into a refspec
            # that begins with one of them
            do_push_refspec(parser, line[len(b'push '):], localrevs)
            revs.extend(localrevs)
        else:
            die(b'unhandled push command: %s' % (line))
    puts()
    # at this stage, all external processes are done, marks files written
    # so we can use those to update notes
    # do so unconditionally because we can and should ....
    update_notes(revs, b"Update notes on push", True)
|
|
|
|
def do_option(parser):
    """Handle the remote-helper 'option' command (dry-run / force)."""
    global dry_run, force_push
    _, key, value = parser.line.split(b' ')
    enabled = (value == b'true')
    if key == b'dry-run':
        dry_run = enabled
        puts(b'ok')
    elif key == b'force':
        force_push = enabled
        puts(b'ok')
    else:
        # any other option is not understood
        puts(b'unsupported')
|
|
|
|
def fix_path(alias, repo, orig_url):
    """Rewrite a relative local-path remote URL into an absolute one in
    the git configuration, so later runs resolve it from any cwd."""
    parsed = compat.urlparse(orig_url, b'file')
    # only relative local paths need fixing
    if parsed.scheme != b'file' or os.path.isabs(os.path.expanduser(parsed.path)):
        return
    cmd = ['git', 'config', b'remote.%s.url' % alias, b"hg::%s" % os.path.abspath(orig_url)]
    subprocess.call(cmd)
|
|
|
|
def select_private_refs(alias):
    """Pick the prefix under which this remote's private refs live, and
    clean up the refs of the scheme that was not selected (they will be
    recreated along the way as and when needed)."""
    if get_config_bool('remote-hg.show-private-refs', False):
        # visible scheme requested: drop any leftover hidden refs
        path = b"%s/refs" % (dirname)
        if os.path.exists(path):
            shutil.rmtree(path, True)
        # in refs space
        return b'refs/hg/%s' % alias

    # hidden scheme: delete any previously visible refs under refs/hg
    lister = subprocess.Popen(
        ['git', 'for-each-ref', '--format=delete %(refname)', 'refs/hg'],
        stdout=subprocess.PIPE)
    deleter = subprocess.Popen(['git', 'update-ref', '--stdin'],
                               stdin=lister.stdout)
    lister.stdout.close() # helps with SIGPIPE
    deleter.communicate()
    # keep private implementation refs really private
    return b'hg/%s/refs' % alias
|
|
|
|
def select_marks_dir(alias, gitdir, migrate):
    """Select the directory holding the marks files for this remote:
    either a per-remote private dir ($GIT_DIR/hg/<alias>) or the shared
    dir ($GIT_DIR/hg), depending on configuration and existing layout.

    When 'migrate' is true and the remote-hg.shared-marks setting is
    explicit, existing marks are migrated between the two layouts.
    Returns the selected directory (bytes path).
    """
    dirname = os.path.join(gitdir, b'hg', alias)
    private_gm = os.path.join(dirname, b'marks-git')
    shared_dir = os.path.join(gitdir, b'hg')
    shared_gm = os.path.join(shared_dir, b'marks-git')
    shared_hgm = os.path.join(shared_dir, b'marks-hg')
    # not good in either case
    if os.path.exists(private_gm) and os.path.exists(shared_gm):
        die(b'found both %s and %s' % (private_gm, shared_gm))
    # retrieve setting
    shared_marks = get_config('remote-hg.shared-marks').strip()
    # standardize to True, False or None
    shared_marks = get_config_bool('remote-hg.shared-marks', False) \
            if shared_marks else None
    # if no specific setting, select one here (favouring shared for new installation)
    if shared_marks == None:
        # select one automagically, favouring shared for new installation
        if os.path.exists(private_gm):
            shared_marks = False
        elif not os.path.exists(shared_gm) and os.path.exists(shared_dir):
            # not a fresh clone, but no shared setup, so use private
            shared_marks = False
        else:
            shared_marks = True
        # only migrate with explicit setting
        migrate = False
    # otherwise, there is not much to decide
    # but a migration from one setup to the other might be needed
    l = os.listdir(shared_dir) if os.path.exists(shared_dir) and migrate else []
    if shared_marks and migrate:
        # make sure all local ones are cleaned up
        # use one of these (preferably origin) to seed the shared
        seen_file = False
        while l:
            d = l.pop()
            gitm = os.path.join(shared_dir, d, b'marks-git')
            hgm = os.path.join(shared_dir, d, b'marks-hg')
            # move marks to shared if no such yet (if origin or last one in line)
            if os.path.exists(gitm) and os.path.exists(hgm) and \
                    not os.path.exists(shared_gm) and not os.path.exists(shared_hgm) and \
                    (d == b'origin' or not l):
                warn(b'using marks of remote %s as shared marks' % (d))
                seen_file = True
                os.rename(gitm, shared_gm)
                os.rename(hgm, shared_hgm)
            for p in (gitm, hgm):
                if os.path.exists(p):
                    seen_file = True
                    os.remove(p)
        # all private marks removed, should have shared
        if seen_file and (not os.path.exists(shared_gm) or not os.path.exists(shared_hgm)):
            die('migration to shared marks failed; perform fetch to recover')
    elif migrate:
        # migrating shared -> private: seed each remote's private marks
        # from the shared ones, then remove the shared files
        if not os.path.exists(shared_gm) or not os.path.exists(shared_hgm):
            l = []
        for d in l:
            if not os.path.isdir(os.path.join(shared_dir, d)) or d == b'.hg':
                continue
            gitm = os.path.join(shared_dir, d, b'marks-git')
            hgm = os.path.join(shared_dir, d, b'marks-hg')
            for p in ((shared_gm, gitm), (shared_hgm, hgm)):
                if os.path.exists(p[0]):
                    shutil.copyfile(p[0], p[1])
            # try to run helper gc
            # only really important for a local repo (without proxy)
            warn(b'seeded marks of %s with shared; performing gc' % d)
            try:
                subprocess.check_call(['git-hg-helper', 'gc', '--check-hg', d],
                        stdout=sys.stderr)
            except:
                warn(b'gc for %s failed' % d)
        for p in (shared_gm, shared_hgm):
            if os.path.exists(p):
                os.remove(p)
    if shared_marks:
        # force proxy for local repo
        os.environ['GIT_REMOTE_HG_TEST_REMOTE'] = 'y'
        return shared_dir
    return dirname
|
|
|
|
def main(args):
    """Remote-helper entry point.

    args is [program, alias, url] (bytes); sets up global state, opens
    the (possibly proxied) Mercurial repo, then services the git
    remote-helper command protocol on stdin until EOF.
    """
    global prefix, gitdir, dirname, branches, bmarks
    global marks, blob_marks, parsed_refs
    global peer, mode, bad_mail, bad_name
    global track_branches, force_push, is_tmp
    global parsed_tags
    global filenodes
    global fake_bmark, hg_version
    global dry_run
    global notes, alias
    global capability_push
    global remove_username_quotes
    global marksdir

    marks = None
    is_tmp = False
    gitdir = compat.getenv(b'GIT_DIR', None)

    if len(args) < 3:
        die('Not enough arguments.')

    if not gitdir:
        die('GIT_DIR not set')

    alias = args[1]
    url = args[2]
    peer = None

    hg_git_compat = get_config_bool('remote-hg.hg-git-compat')
    track_branches = get_config_bool('remote-hg.track-branches', True)
    capability_push = get_config_bool('remote-hg.capability-push', True)
    remove_username_quotes = get_config_bool('remote-hg.remove-username-quotes', True)
    force_push = False

    # placeholder author identity depends on the compatibility mode
    if hg_git_compat:
        mode = 'hg'
        bad_mail = b'none@none'
        bad_name = b''
    else:
        mode = 'git'
        bad_mail = b'unknown'
        bad_name = b'Unknown'

    # alias equal to the url (minus the 'hg::' prefix) means an unnamed
    # remote: use a temporary clone keyed by a hash of the url
    if alias[4:] == url:
        is_tmp = True
        alias = compat.to_b(hashlib.sha1(alias).hexdigest())

    dirname = os.path.join(gitdir, b'hg', alias)
    branches = {}
    bmarks = {}
    blob_marks = {}
    filenodes = {}
    fake_bmark = None
    try:
        # parse the Mercurial version; a '+' suffix marks a dev build,
        # treated as the next point release
        version, _, extra = util.version().partition(b'+')
        version = list(int(e) for e in version.split(b'.'))
        if extra:
            version[-1] += 1
        hg_version = tuple(version)
    except:
        hg_version = None
    dry_run = False
    notes = set()

    if not capability_push:
        warn('capability_push is disabled, only do so when really sure')
        warn('various enhanced features might fail in subtle ways')

    prefix = select_private_refs(alias)
    marksdir = select_marks_dir(alias, gitdir, True)
    repo, branchmap = get_repo(url, alias)

    if not is_tmp:
        fix_path(alias, peer or repo, url)

    marks_path = os.path.join(marksdir, b'marks-hg')
    marks = Marks(marks_path)

    if sys.platform == 'win32':
        # stdout must be binary to avoid CRLF mangling of the protocol
        import msvcrt
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)

    # service the remote-helper command protocol
    parser = Parser(repo)
    for line in parser:
        if parser.check(b'capabilities'):
            do_capabilities(parser)
        elif parser.check(b'list'):
            do_list(parser, branchmap)
        elif parser.check(b'import'):
            do_import(parser)
        elif parser.check(b'export'):
            do_export(parser)
        elif parser.check(b'push'):
            do_push(parser)
        elif parser.check(b'option'):
            do_option(parser)
        else:
            die(b'unhandled command: %s' % line)
        compat.stdout.flush()

    # persist the hg marks only after a fully successful run
    marks.store()
|
|
|
|
def bye():
    """atexit hook: discard the temporary clone directory, if one was made."""
    if not is_tmp:
        return
    shutil.rmtree(dirname)
|
|
|
|
if __name__ == '__main__':
    # ensure any temporary clone directory is cleaned up on exit
    atexit.register(bye)
    # re-encode command-line arguments to bytes in a platform-aware way
    sys.exit(main([compat.decode_sysarg(a) for a in sys.argv]))
|