# -*- coding:utf-8 -*-
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import json
import netrc
from optparse import SUPPRESS_HELP
import os
import re
import socket
import subprocess
import sys
import tempfile
import time

from pyversion import is_python3
if is_python3():
  import http.cookiejar as cookielib
  import urllib.error
  import urllib.parse
  import urllib.request
  import xmlrpc.client
else:
  import cookielib
  import imp
  import urllib2
  import urlparse
  import xmlrpclib
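  # Fake the Python 3 module layout under Python 2: expose urllib2 and
  # urlparse as urllib.request/urllib.parse, and xmlrpclib as
  # xmlrpc.client, so the rest of this file can use the Python 3 names
  # unconditionally.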
  urllib = imp.new_module('urllib')
  urllib.error = urllib2
  urllib.parse = urlparse
  urllib.request = urllib2
  xmlrpc = imp.new_module('xmlrpc')
  xmlrpc.client = xmlrpclib

try:
  import threading as _threading
except ImportError:
  import dummy_threading as _threading

try:
  import resource

  def _rlimit_nofile():
    return resource.getrlimit(resource.RLIMIT_NOFILE)
except ImportError:
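  # The resource module is unavailable on some platforms (notably
  # Windows); fall back to a conservative guess at the default soft
  # file-descriptor limit.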
  def _rlimit_nofile():
    return (256, 256)

try:
  import multiprocessing
except ImportError:
  multiprocessing = None

import event_log
from git_command import GIT, git_require
from git_config import GetUrlCookieFile
from git_refs import R_HEADS, HEAD
import gitc_utils
from project import Project
from project import RemoteSpec
from command import Command, MirrorSafeCommand
from error import RepoChangedException, GitError, ManifestParseError
import platform_utils
from project import SyncBuffer
from progress import Progress
from wrapper import Wrapper

from manifest_xml import GitcManifest

_ONE_DAY_S = 24 * 60 * 60


class _FetchError(Exception):
  """Internal error thrown in _FetchHelper() when we don't want stack trace."""
  pass


class _CheckoutError(Exception):
  """Internal error thrown in _CheckoutOne() when we don't want stack trace."""


class Sync(Command, MirrorSafeCommand):
  jobs = 1
  common = True
  helpSummary = "Update working tree to the latest revision"
  helpUsage = """
%prog [<project>...]
"""
  helpDescription = """
The '%prog' command synchronizes local project directories
with the remote repositories specified in the manifest.  If a local
project does not yet exist, it will clone a new local directory from
the remote repository and set up tracking branches as specified in
the manifest.  If the local project already exists, '%prog'
will update the remote branches and rebase any new local changes
on top of the new remote changes.

'%prog' will synchronize all projects listed at the command
line.  Projects can be specified either by name, or by a relative
or absolute path to the project's local directory.  If no projects
are specified, '%prog' will synchronize all projects listed in
the manifest.

The -d/--detach option can be used to switch specified projects
back to the manifest revision.  This option is especially helpful
if the project is currently on a topic branch, but the manifest
revision is temporarily needed.

The -s/--smart-sync option can be used to sync to a known good
build as specified by the manifest-server element in the current
manifest.  The -t/--smart-tag option is similar and allows you to
specify a custom tag/label.

The -u/--manifest-server-username and -p/--manifest-server-password
options can be used to specify a username and password to authenticate
with the manifest server when using the -s or -t option.

If -u and -p are not specified when using the -s or -t option, '%prog'
will attempt to read authentication credentials for the manifest server
from the user's .netrc file.

'%prog' will not use authentication credentials from -u/-p or .netrc
if the manifest server specified in the manifest file already includes
credentials.

By default, all projects will be synced.  The --fail-fast option can be
used to halt syncing as soon as possible when the first project fails
to sync.

The --force-sync option can be used to overwrite existing git
directories if they have previously been linked to a different
object directory.  WARNING: This may cause data to be lost since
refs may be removed when overwriting.

The --force-remove-dirty option can be used to remove previously used
projects with uncommitted changes.  WARNING: This may cause data to be
lost since uncommitted changes may be removed with projects that no longer
exist in the manifest.

The --no-clone-bundle option disables any attempt to use
$URL/clone.bundle to bootstrap a new Git repository from a
resumable bundle file on a content delivery network.  This
may be necessary if there are problems with the local Python
HTTP client or proxy configuration, but the Git binary works.

The --fetch-submodules option enables fetching Git submodules
of a project from the server.

The -c/--current-branch option can be used to only fetch objects that
are on the branch specified by a project's revision.

The --optimized-fetch option can be used to only fetch projects that
are fixed to a sha1 revision if the sha1 revision does not already
exist locally.

The --prune option can be used to remove any refs that no longer
exist on the remote.

# SSH Connections

If at least one project remote URL uses an SSH connection (ssh://,
git+ssh://, or user@host:path syntax) repo will automatically
enable the SSH ControlMaster option when connecting to that host.
This feature permits other projects in the same '%prog' session to
reuse the same SSH tunnel, saving connection setup overheads.

To disable this behavior on UNIX platforms, set the GIT_SSH
environment variable to 'ssh'.  For example:

  export GIT_SSH=ssh
  %prog

# Compatibility

This feature is automatically disabled on Windows, due to the lack
of UNIX domain socket support.

This feature is not compatible with url.insteadOf rewrites in the
user's ~/.gitconfig.  '%prog' is currently not able to perform the
rewrite early enough to establish the ControlMaster tunnel.

If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
later is required to fix a server side protocol bug.

"""
  def _Options(self, p, show_smart=True):
    try:
      self.jobs = self.manifest.default.sync_j
    except ManifestParseError:
      self.jobs = 1

    p.add_option('-f', '--force-broken',
                 dest='force_broken', action='store_true',
                 help='obsolete option (to be deleted in the future)')
    p.add_option('--fail-fast',
                 dest='fail_fast', action='store_true',
                 help='stop syncing after first error is hit')
    p.add_option('--force-sync',
                 dest='force_sync', action='store_true',
                 help="overwrite an existing git directory if it needs to "
                      "point to a different object directory. WARNING: this "
                      "may cause loss of data")
    p.add_option('--force-remove-dirty',
                 dest='force_remove_dirty', action='store_true',
                 help="force remove projects with uncommitted modifications if "
                      "projects no longer exist in the manifest. "
                      "WARNING: this may cause loss of data")
    p.add_option('-l', '--local-only',
                 dest='local_only', action='store_true',
                 help="only update working tree, don't fetch")
    p.add_option('-n', '--network-only',
                 dest='network_only', action='store_true',
                 help="fetch only, don't update working tree")
    p.add_option('-d', '--detach',
                 dest='detach_head', action='store_true',
                 help='detach projects back to manifest revision')
    p.add_option('-c', '--current-branch',
                 dest='current_branch_only', action='store_true',
                 help='fetch only current branch from server')
    p.add_option('-q', '--quiet',
                 dest='quiet', action='store_true',
                 help='be more quiet')
    p.add_option('-j', '--jobs',
                 dest='jobs', action='store', type='int',
                 help="projects to fetch simultaneously (default %d)" % self.jobs)
    p.add_option('-m', '--manifest-name',
                 dest='manifest_name',
                 help='temporary manifest to use for this sync', metavar='NAME.xml')
    p.add_option('--no-clone-bundle',
                 dest='no_clone_bundle', action='store_true',
                 help='disable use of /clone.bundle on HTTP/HTTPS')
    p.add_option('-u', '--manifest-server-username', action='store',
                 dest='manifest_server_username',
                 help='username to authenticate with the manifest server')
    p.add_option('-p', '--manifest-server-password', action='store',
                 dest='manifest_server_password',
                 help='password to authenticate with the manifest server')
    p.add_option('--fetch-submodules',
                 dest='fetch_submodules', action='store_true',
                 help='fetch submodules from server')
    p.add_option('--no-tags',
                 dest='no_tags', action='store_true',
                 help="don't fetch tags")
    p.add_option('--optimized-fetch',
                 dest='optimized_fetch', action='store_true',
                 help='only fetch projects fixed to sha1 if revision does not exist locally')
    p.add_option('--prune', dest='prune', action='store_true',
                 help='delete refs that no longer exist on the remote')
    if show_smart:
      p.add_option('-s', '--smart-sync',
                   dest='smart_sync', action='store_true',
                   help='smart sync using manifest from the latest known good build')
      p.add_option('-t', '--smart-tag',
                   dest='smart_tag', action='store',
                   help='smart sync using manifest from a known tag')

    g = p.add_option_group('repo Version options')
    g.add_option('--no-repo-verify',
                 dest='no_repo_verify', action='store_true',
                 help='do not verify repo source code')
    g.add_option('--repo-upgraded',
                 dest='repo_upgraded', action='store_true',
                 help=SUPPRESS_HELP)
  def _FetchProjectList(self, opt, projects, sem, *args, **kwargs):
    """Main function of the fetch threads.

    Delegates most of the work to _FetchHelper.

    Args:
      opt: Program options returned from optparse.  See _Options().
      projects: Projects to fetch.
      sem: We'll release() this semaphore when we exit so that another thread
          can be started up.
      *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
          _FetchHelper docstring for details.
    """
    try:
      for project in projects:
        success = self._FetchHelper(opt, project, *args, **kwargs)
        if not success and opt.fail_fast:
          break
    finally:
      sem.release()
  def _FetchHelper(self, opt, project, lock, fetched, pm, err_event,
                   clone_filter):
    """Fetch git objects for a single project.

    Args:
      opt: Program options returned from optparse.  See _Options().
      project: Project object for the project to fetch.
      lock: Lock for accessing objects that are shared amongst multiple
          _FetchHelper() threads.
      fetched: set object that we will add project.gitdir to when we're done
          (with our lock held).
      pm: Instance of a Progress object; we will call pm.update() (with our
          lock held).
      err_event: We'll set this event in the case of an error (after printing
          out info about the error).
      clone_filter: Filter for use in a partial clone.

    Returns:
      Whether the fetch was successful.
    """
    # We'll set to true once we've locked the lock.
    did_lock = False

    # Encapsulate everything in a try/except/finally so that:
    # - We always set err_event in the case of an exception.
    # - We always make sure we unlock the lock if we locked it.
    start = time.time()
    success = False
    try:
      try:
        success = project.Sync_NetworkHalf(
            quiet=opt.quiet,
            current_branch_only=opt.current_branch_only,
            force_sync=opt.force_sync,
            clone_bundle=not opt.no_clone_bundle,
            no_tags=opt.no_tags, archive=self.manifest.IsArchive,
            optimized_fetch=opt.optimized_fetch,
            prune=opt.prune,
            clone_filter=clone_filter)
        self._fetch_times.Set(project, time.time() - start)

        # Lock around all the rest of the code, since printing, updating a set
        # and Progress.update() are not thread safe.
        lock.acquire()
        did_lock = True

        if not success:
          err_event.set()
          print('error: Cannot fetch %s from %s'
                % (project.name, project.remote.url),
                file=sys.stderr)
          if opt.fail_fast:
            raise _FetchError()

        fetched.add(project.gitdir)
        pm.update(msg=project.name)
      except _FetchError:
        pass
      except Exception as e:
        print('error: Cannot fetch %s (%s: %s)'
              % (project.name, type(e).__name__, str(e)), file=sys.stderr)
        err_event.set()
        raise
    finally:
      if did_lock:
        lock.release()
      finish = time.time()
      self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
                             start, finish, success)

    return success
  def _Fetch(self, projects, opt):
    fetched = set()
    lock = _threading.Lock()
    pm = Progress('Fetching projects', len(projects),
                  always_print_percentage=opt.quiet)
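    # Group projects by their object directory: projects that share an
    # object store are fetched sequentially on the same worker thread,
    # so concurrent fetches never race on a single objdir.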
    objdir_project_map = dict()
    for project in projects:
      objdir_project_map.setdefault(project.objdir, []).append(project)

    threads = set()
    sem = _threading.Semaphore(self.jobs)
    err_event = _threading.Event()
    for project_list in objdir_project_map.values():
      # Check for any errors before running any more tasks.
      # ...we'll let existing threads finish, though.
      if err_event.isSet() and opt.fail_fast:
        break

      sem.acquire()
      kwargs = dict(opt=opt,
                    projects=project_list,
                    sem=sem,
                    lock=lock,
                    fetched=fetched,
                    pm=pm,
                    err_event=err_event,
                    clone_filter=self.manifest.CloneFilter)
      if self.jobs > 1:
        t = _threading.Thread(target=self._FetchProjectList,
                              kwargs=kwargs)
        # Ensure that Ctrl-C will not freeze the repo process.
        t.daemon = True
        threads.add(t)
        t.start()
      else:
        self._FetchProjectList(**kwargs)

    for t in threads:
      t.join()

    # If we saw an error, exit with code 1 so that other scripts can check.
    if err_event.isSet() and opt.fail_fast:
      print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
      sys.exit(1)

    pm.end()
    self._fetch_times.Save()

    if not self.manifest.IsArchive:
      self._GCProjects(projects)

    return fetched
  def _CheckoutWorker(self, opt, sem, project, *args, **kwargs):
    """Main function of the checkout threads.

    Delegates most of the work to _CheckoutOne.

    Args:
      opt: Program options returned from optparse.  See _Options().
      project: Project object for the project to check out.
      sem: We'll release() this semaphore when we exit so that another thread
          can be started up.
      *args, **kwargs: Remaining arguments to pass to _CheckoutOne. See the
          _CheckoutOne docstring for details.
    """
    try:
      return self._CheckoutOne(opt, project, *args, **kwargs)
    finally:
      sem.release()
  def _CheckoutOne(self, opt, project, lock, pm, err_event, err_results):
    """Checkout work tree for one project

    Args:
      opt: Program options returned from optparse.  See _Options().
      project: Project object for the project to checkout.
      lock: Lock for accessing objects that are shared amongst multiple
          _CheckoutWorker() threads.
      pm: Instance of a Progress object; we will call pm.update() (with our
          lock held).
      err_event: We'll set this event in the case of an error (after printing
          out info about the error).
      err_results: A list of strings, paths to git repos where checkout
          failed.

    Returns:
      Whether the checkout was successful.
    """
    # We'll set to true once we've locked the lock.
    did_lock = False

    # Encapsulate everything in a try/except/finally so that:
    # - We always set err_event in the case of an exception.
    # - We always make sure we unlock the lock if we locked it.
    start = time.time()
    syncbuf = SyncBuffer(self.manifest.manifestProject.config,
                         detach_head=opt.detach_head)
    success = False
    try:
      try:
        project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)

        # Lock around all the rest of the code, since printing, updating a set
        # and Progress.update() are not thread safe.
        lock.acquire()
        success = syncbuf.Finish()
        did_lock = True

        if not success:
          err_event.set()
          print('error: Cannot checkout %s' % (project.name),
                file=sys.stderr)
          raise _CheckoutError()

        pm.update(msg=project.name)
      except _CheckoutError:
        pass
      except Exception as e:
        print('error: Cannot checkout %s: %s: %s' %
              (project.name, type(e).__name__, str(e)),
              file=sys.stderr)
        err_event.set()
        raise
    finally:
      if did_lock:
        if not success:
          err_results.append(project.relpath)
        lock.release()
      finish = time.time()
      self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
                             start, finish, success)

    return success
  def _Checkout(self, all_projects, opt):
    """Checkout projects listed in all_projects

    Args:
      all_projects: List of all projects that should be checked out.
      opt: Program options returned from optparse.  See _Options().
    """
    # Perform checkouts in multiple threads when we are using partial clone.
    # Without partial clone, all needed git objects are already downloaded;
    # in that situation it's better to use only one process because the
    # checkout would be mostly disk I/O.  With partial clone, the objects are
    # only downloaded on demand (at checkout time), which is similar to the
    # Sync_NetworkHalf case, so parallelism is helpful.
    if self.manifest.CloneFilter:
      syncjobs = self.jobs
    else:
      syncjobs = 1

    lock = _threading.Lock()
    pm = Progress('Checking out projects', len(all_projects))

    threads = set()
    sem = _threading.Semaphore(syncjobs)
    err_event = _threading.Event()
    err_results = []

    for project in all_projects:
      # Check for any errors before running any more tasks.
      # ...we'll let existing threads finish, though.
      if err_event.isSet() and opt.fail_fast:
        break

      if project.worktree:
        # Only take a semaphore slot when a worker will actually run (and
        # therefore release it); projects without a worktree are skipped.
        sem.acquire()
        kwargs = dict(opt=opt,
                      sem=sem,
                      project=project,
                      lock=lock,
                      pm=pm,
                      err_event=err_event,
                      err_results=err_results)
        if syncjobs > 1:
          t = _threading.Thread(target=self._CheckoutWorker,
                                kwargs=kwargs)
          # Ensure that Ctrl-C will not freeze the repo process.
          t.daemon = True
          threads.add(t)
          t.start()
        else:
          self._CheckoutWorker(**kwargs)

    for t in threads:
      t.join()

    pm.end()
    # If we saw an error, exit with code 1 so that other scripts can check.
    if err_event.isSet():
      print('\nerror: Exited sync due to checkout errors', file=sys.stderr)
      if err_results:
        print('Failing repos:\n%s' % '\n'.join(err_results),
              file=sys.stderr)
      sys.exit(1)
  def _GCProjects(self, projects):
    gc_gitdirs = {}
    for project in projects:
      if len(project.manifest.GetProjectsWithName(project.name)) > 1:
        print('Shared project %s found, disabling pruning.' % project.name)
        project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
      gc_gitdirs[project.gitdir] = project.bare_git

    has_dash_c = git_require((1, 7, 2))
    if multiprocessing and has_dash_c:
      cpu_count = multiprocessing.cpu_count()
    else:
      cpu_count = 1
    jobs = min(self.jobs, cpu_count)

    if jobs < 2:
      for bare_git in gc_gitdirs.values():
        bare_git.gc('--auto')
      return
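    # Running several 'git gc' processes at once, give each a share of the
    # available CPUs for packing so the total thread count stays close to
    # cpu_count.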
    config = {'pack.threads': cpu_count // jobs if cpu_count > jobs else 1}

    threads = set()
    sem = _threading.Semaphore(jobs)
    err_event = _threading.Event()

    def GC(bare_git):
      try:
        try:
          bare_git.gc('--auto', config=config)
        except GitError:
          err_event.set()
        except:
          err_event.set()
          raise
      finally:
        sem.release()

    for bare_git in gc_gitdirs.values():
      if err_event.isSet():
        break
      sem.acquire()
      t = _threading.Thread(target=GC, args=(bare_git,))
      t.daemon = True
      threads.add(t)
      t.start()

    for t in threads:
      t.join()

    if err_event.isSet():
      print('\nerror: Exited sync due to gc errors', file=sys.stderr)
      sys.exit(1)
  def _ReloadManifest(self, manifest_name=None):
    if manifest_name:
      # Override calls _Unload already
      self.manifest.Override(manifest_name)
    else:
      self.manifest._Unload()
  def _DeleteProject(self, path):
    print('Deleting obsolete path %s' % path, file=sys.stderr)

    # Delete the .git directory first, so we're less likely to have a partially
    # working git repository around. There shouldn't be any git projects here,
    # so rmtree works.
    try:
      platform_utils.rmtree(os.path.join(path, '.git'))
    except OSError as e:
      print('Failed to remove %s (%s)' % (os.path.join(path, '.git'), str(e)),
            file=sys.stderr)
      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
      print('       remove manually, then run sync again', file=sys.stderr)
      return 1

    # Delete everything under the worktree, except for directories that contain
    # another git project.
    dirs_to_remove = []
    failed = False
    for root, dirs, files in platform_utils.walk(path):
      for f in files:
        try:
          platform_utils.remove(os.path.join(root, f))
        except OSError as e:
          print('Failed to remove %s (%s)' % (os.path.join(root, f), str(e)),
                file=sys.stderr)
          failed = True
      dirs[:] = [d for d in dirs
                 if not os.path.lexists(os.path.join(root, d, '.git'))]
      dirs_to_remove += [os.path.join(root, d) for d in dirs
                         if os.path.join(root, d) not in dirs_to_remove]
    # dirs_to_remove holds full paths, so report 'd' directly on failure.
    for d in reversed(dirs_to_remove):
      if platform_utils.islink(d):
        try:
          platform_utils.remove(d)
        except OSError as e:
          print('Failed to remove %s (%s)' % (d, str(e)), file=sys.stderr)
          failed = True
      elif len(platform_utils.listdir(d)) == 0:
        try:
          platform_utils.rmdir(d)
        except OSError as e:
          print('Failed to remove %s (%s)' % (d, str(e)), file=sys.stderr)
          failed = True
    if failed:
      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
      print('       remove manually, then run sync again', file=sys.stderr)
      return 1

    # Try deleting parent dirs if they are empty.
    project_dir = path
    while project_dir != self.manifest.topdir:
      if len(platform_utils.listdir(project_dir)) == 0:
        platform_utils.rmdir(project_dir)
      else:
        break
      project_dir = os.path.dirname(project_dir)

    return 0
  def UpdateProjectList(self, opt):
    new_project_paths = []
    for project in self.GetProjects(None, missing_ok=True):
      if project.relpath:
        new_project_paths.append(project.relpath)
    file_name = 'project.list'
    file_path = os.path.join(self.manifest.repodir, file_name)
    old_project_paths = []

    if os.path.exists(file_path):
      with open(file_path, 'r') as fd:
        old_project_paths = fd.read().split('\n')
      # In reversed order, so subfolders are deleted before parent folder.
      for path in sorted(old_project_paths, reverse=True):
        if not path:
          continue
        if path not in new_project_paths:
          # If the path has already been deleted, we don't need to do it.
          gitdir = os.path.join(self.manifest.topdir, path, '.git')
          if os.path.exists(gitdir):
            project = Project(
                manifest=self.manifest,
                name=path,
                remote=RemoteSpec('origin'),
                gitdir=gitdir,
                objdir=gitdir,
                worktree=os.path.join(self.manifest.topdir, path),
                relpath=path,
                revisionExpr='HEAD',
                revisionId=None,
                groups=None)

            if project.IsDirty() and opt.force_remove_dirty:
              print('WARNING: Removing dirty project "%s": uncommitted changes '
                    'erased' % project.relpath, file=sys.stderr)
              self._DeleteProject(project.worktree)
            elif project.IsDirty():
              print('error: Cannot remove project "%s": uncommitted changes '
                    'are present' % project.relpath, file=sys.stderr)
              print('       commit changes, then run sync again',
                    file=sys.stderr)
              return 1
            elif self._DeleteProject(project.worktree):
              return 1

    new_project_paths.sort()
    with open(file_path, 'w') as fd:
      fd.write('\n'.join(new_project_paths))
      fd.write('\n')
    return 0
  def _SmartSyncSetup(self, opt, smart_sync_manifest_path):
    if not self.manifest.manifest_server:
      print('error: cannot smart sync: no manifest server defined in '
            'manifest', file=sys.stderr)
      sys.exit(1)

    manifest_server = self.manifest.manifest_server
    if not opt.quiet:
      print('Using manifest server %s' % manifest_server)

    if '@' not in manifest_server:
      username = None
      password = None
      if opt.manifest_server_username and opt.manifest_server_password:
        username = opt.manifest_server_username
        password = opt.manifest_server_password
      else:
        try:
          info = netrc.netrc()
        except IOError:
          # .netrc file does not exist or could not be opened.
          pass
        else:
          try:
            parse_result = urllib.parse.urlparse(manifest_server)
            if parse_result.hostname:
              auth = info.authenticators(parse_result.hostname)
              if auth:
                username, _account, password = auth
              else:
                print('No credentials found for %s in .netrc'
                      % parse_result.hostname, file=sys.stderr)
          except netrc.NetrcParseError as e:
            print('Error parsing .netrc file: %s' % e, file=sys.stderr)

      if (username and password):
        manifest_server = manifest_server.replace('://', '://%s:%s@' %
                                                  (username, password),
                                                  1)

    transport = PersistentTransport(manifest_server)
    if manifest_server.startswith('persistent-'):
      manifest_server = manifest_server[len('persistent-'):]
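    # xmlrpc.client only understands plain http/https URLs, so strip the
    # persistent- scheme prefix before building the server proxy; the
    # transport above keeps the original URL and handles the persistent
    # scheme itself.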
    try:
      server = xmlrpc.client.Server(manifest_server, transport=transport)
      if opt.smart_sync:
        p = self.manifest.manifestProject
        b = p.GetBranch(p.CurrentBranch)
        branch = b.merge
        if branch.startswith(R_HEADS):
          branch = branch[len(R_HEADS):]

        env = os.environ.copy()
        if 'SYNC_TARGET' in env:
          target = env['SYNC_TARGET']
          [success, manifest_str] = server.GetApprovedManifest(branch, target)
        elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
          target = '%s-%s' % (env['TARGET_PRODUCT'],
                              env['TARGET_BUILD_VARIANT'])
          [success, manifest_str] = server.GetApprovedManifest(branch, target)
        else:
          [success, manifest_str] = server.GetApprovedManifest(branch)
      else:
        assert(opt.smart_tag)
        [success, manifest_str] = server.GetManifest(opt.smart_tag)

      if success:
        manifest_name = os.path.basename(smart_sync_manifest_path)
        try:
          with open(smart_sync_manifest_path, 'w') as f:
            f.write(manifest_str)
        except IOError as e:
          print('error: cannot write manifest to %s:\n%s'
                % (smart_sync_manifest_path, e),
                file=sys.stderr)
          sys.exit(1)
        self._ReloadManifest(manifest_name)
      else:
        print('error: manifest server RPC call failed: %s' %
              manifest_str, file=sys.stderr)
        sys.exit(1)
    except (socket.error, IOError, xmlrpc.client.Fault) as e:
      print('error: cannot connect to manifest server %s:\n%s'
            % (self.manifest.manifest_server, e), file=sys.stderr)
      sys.exit(1)
    except xmlrpc.client.ProtocolError as e:
      print('error: cannot connect to manifest server %s:\n%d %s'
            % (self.manifest.manifest_server, e.errcode, e.errmsg),
            file=sys.stderr)
      sys.exit(1)

    return manifest_name
  def _UpdateManifestProject(self, opt, mp, manifest_name):
    """Fetch & update the local manifest project."""
    if not opt.local_only:
      start = time.time()
      success = mp.Sync_NetworkHalf(quiet=opt.quiet,
                                    current_branch_only=opt.current_branch_only,
                                    no_tags=opt.no_tags,
                                    optimized_fetch=opt.optimized_fetch,
                                    submodules=self.manifest.HasSubmodules,
                                    clone_filter=self.manifest.CloneFilter)
      finish = time.time()
      self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK,
                             start, finish, success)

    if mp.HasChanges:
      syncbuf = SyncBuffer(mp.config)
      start = time.time()
      mp.Sync_LocalHalf(syncbuf, submodules=self.manifest.HasSubmodules)
      clean = syncbuf.Finish()
      self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL,
                             start, time.time(), clean)
      if not clean:
        sys.exit(1)
      self._ReloadManifest(opt.manifest_name)
      if opt.jobs is None:
        self.jobs = self.manifest.default.sync_j
  def ValidateOptions(self, opt, args):
    if opt.force_broken:
      print('warning: -f/--force-broken is now the default behavior, and the '
            'option is deprecated', file=sys.stderr)
    if opt.network_only and opt.detach_head:
      self.OptionParser.error('cannot combine -n and -d')
    if opt.network_only and opt.local_only:
      self.OptionParser.error('cannot combine -n and -l')
    if opt.manifest_name and opt.smart_sync:
      self.OptionParser.error('cannot combine -m and -s')
    if opt.manifest_name and opt.smart_tag:
      self.OptionParser.error('cannot combine -m and -t')
    if opt.manifest_server_username or opt.manifest_server_password:
      if not (opt.smart_sync or opt.smart_tag):
        self.OptionParser.error('-u and -p may only be combined with -s or -t')
      if None in [opt.manifest_server_username, opt.manifest_server_password]:
        self.OptionParser.error('both -u and -p must be given')
  def Execute(self, opt, args):
    if opt.jobs:
      self.jobs = opt.jobs
    if self.jobs > 1:
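      # Each fetch worker may hold several file descriptors open at once
      # (pipes to git, ssh control sockets, pack files), so cap the job
      # count at roughly a third of the soft fd limit, keeping a few
      # descriptors in reserve for repo itself.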
      soft_limit, _ = _rlimit_nofile()
      self.jobs = min(self.jobs, (soft_limit - 5) // 3)

    if opt.manifest_name:
      self.manifest.Override(opt.manifest_name)

    manifest_name = opt.manifest_name
    smart_sync_manifest_path = os.path.join(
        self.manifest.manifestProject.worktree, 'smart_sync_override.xml')

    if opt.smart_sync or opt.smart_tag:
      manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path)
    else:
      if os.path.isfile(smart_sync_manifest_path):
        try:
          platform_utils.remove(smart_sync_manifest_path)
        except OSError as e:
          print('error: failed to remove existing smart sync override manifest: %s' %
                e, file=sys.stderr)

    rp = self.manifest.repoProject
    rp.PreSync()

    mp = self.manifest.manifestProject
    mp.PreSync()

    if opt.repo_upgraded:
      _PostRepoUpgrade(self.manifest, quiet=opt.quiet)

    self._UpdateManifestProject(opt, mp, manifest_name)

    if self.gitc_manifest:
      gitc_manifest_projects = self.GetProjects(args,
                                                missing_ok=True)
      gitc_projects = []
      opened_projects = []
      for project in gitc_manifest_projects:
        if project.relpath in self.gitc_manifest.paths and \
           self.gitc_manifest.paths[project.relpath].old_revision:
          opened_projects.append(project.relpath)
        else:
          gitc_projects.append(project.relpath)

      if not args:
        gitc_projects = None

      if gitc_projects != [] and not opt.local_only:
        print('Updating GITC client: %s' % self.gitc_manifest.gitc_client_name)
        manifest = GitcManifest(self.repodir, self.gitc_manifest.gitc_client_name)
        if manifest_name:
          manifest.Override(manifest_name)
        else:
          manifest.Override(self.manifest.manifestFile)
        gitc_utils.generate_gitc_manifest(self.gitc_manifest,
                                          manifest,
                                          gitc_projects)
        print('GITC client successfully synced.')

      # The opened projects need to be synced as normal, therefore we
      # generate a new args list to represent the opened projects.
      # TODO: make this more reliable -- if there's a project name/path overlap,
      # this may choose the wrong project.
      args = [os.path.relpath(self.manifest.paths[path].worktree, os.getcwd())
              for path in opened_projects]
      if not args:
        return

    all_projects = self.GetProjects(args,
                                    missing_ok=True,
                                    submodules_ok=opt.fetch_submodules)

    self._fetch_times = _FetchTimes(self.manifest)
    if not opt.local_only:
      to_fetch = []
      now = time.time()
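      # Re-fetch the repo tool itself at most once per day.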
      if _ONE_DAY_S <= (now - rp.LastFetch):
        to_fetch.append(rp)
      to_fetch.extend(all_projects)
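      # Schedule the historically slowest fetches first so they overlap
      # with the many faster ones (_FetchTimes holds smoothed durations
      # from previous runs).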
      to_fetch.sort(key=self._fetch_times.Get, reverse=True)

      fetched = self._Fetch(to_fetch, opt)
      _PostRepoFetch(rp, opt.no_repo_verify)
      if opt.network_only:
        # bail out now; the rest touches the working tree
        return

      # Iteratively fetch missing and/or nested unregistered submodules.
      previously_missing_set = set()
      while True:
        self._ReloadManifest(manifest_name)
        all_projects = self.GetProjects(args,
                                        missing_ok=True,
                                        submodules_ok=opt.fetch_submodules)
        missing = []
        for project in all_projects:
          if project.gitdir not in fetched:
            missing.append(project)
        if not missing:
          break
        # Stop retrying projects that are genuinely missing: if the set of
        # missing repos is unchanged from the previous pass, give up.
        missing_set = set(p.name for p in missing)
        if previously_missing_set == missing_set:
          break
        previously_missing_set = missing_set
        fetched.update(self._Fetch(missing, opt))

    if self.manifest.IsMirror or self.manifest.IsArchive:
      # bail out now, we have no working tree
      return

    if self.UpdateProjectList(opt):
      sys.exit(1)

    self._Checkout(all_projects, opt)

    # If there's a notice that's supposed to print at the end of the sync,
    # print it now...
    if self.manifest.notice:
      print(self.manifest.notice)


def _PostRepoUpgrade(manifest, quiet=False):
  wrapper = Wrapper()
  if wrapper.NeedSetupGnuPG():
    wrapper.SetupGnuPG(quiet)
  for project in manifest.projects:
    if project.Exists:
      project.PostRepoUpgrade()


def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
  if rp.HasChanges:
    print('info: A new version of repo is available', file=sys.stderr)
    print(file=sys.stderr)
    if no_repo_verify or _VerifyTag(rp):
      syncbuf = SyncBuffer(rp.config)
      rp.Sync_LocalHalf(syncbuf)
      if not syncbuf.Finish():
        sys.exit(1)
      print('info: Restarting repo with latest version', file=sys.stderr)
      raise RepoChangedException(['--repo-upgraded'])
    else:
      print('warning: Skipped upgrade to unverified version', file=sys.stderr)
  else:
    if verbose:
      print('repo version %s is current' % rp.work_git.describe(HEAD),
            file=sys.stderr)


def _VerifyTag(project):
  gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
  if not os.path.exists(gpg_dir):
    print('warning: GnuPG was not available during last "repo init"\n'
          'warning: Cannot automatically authenticate repo.',
          file=sys.stderr)
    return True

  try:
    cur = project.bare_git.describe(project.GetRevisionId())
  except GitError:
    cur = None

  if not cur \
     or re.compile(r'^.*-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur):
    rev = project.revisionExpr
    if rev.startswith(R_HEADS):
      rev = rev[len(R_HEADS):]

    print(file=sys.stderr)
    print("warning: project '%s' branch '%s' is not signed"
          % (project.name, rev), file=sys.stderr)
    return False

  env = os.environ.copy()
  env['GIT_DIR'] = project.gitdir.encode()
  env['GNUPGHOME'] = gpg_dir.encode()

  cmd = [GIT, 'tag', '-v', cur]
  proc = subprocess.Popen(cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          env=env)
  out = proc.stdout.read()
  proc.stdout.close()

  err = proc.stderr.read()
  proc.stderr.close()

  if proc.wait() != 0:
    print(file=sys.stderr)
    print(out, file=sys.stderr)
    print(err, file=sys.stderr)
    print(file=sys.stderr)
    return False

  return True


class _FetchTimes(object):
  _ALPHA = 0.5

  def __init__(self, manifest):
    self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
    self._times = None
    self._seen = set()

  def Get(self, project):
    self._Load()
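    # Projects never seen before default to one day, which is large
    # enough to sort them ahead of most known projects so they get
    # fetched early.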
    return self._times.get(project.name, _ONE_DAY_S)

  def Set(self, project, t):
    self._Load()
    name = project.name
    old = self._times.get(name, t)
    self._seen.add(name)
    a = self._ALPHA
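    # Exponentially weighted moving average: with _ALPHA = 0.5 the new
    # sample and the accumulated history get equal weight, smoothing out
    # one-off slow fetches.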
    self._times[name] = (a * t) + ((1 - a) * old)

  def _Load(self):
    if self._times is None:
      try:
        with open(self._path) as f:
          self._times = json.load(f)
      except (IOError, ValueError):
        try:
          platform_utils.remove(self._path)
        except OSError:
          pass
        self._times = {}

  def Save(self):
    if self._times is None:
      return

    to_delete = []
    for name in self._times:
      if name not in self._seen:
        to_delete.append(name)
    for name in to_delete:
      del self._times[name]

    try:
      with open(self._path, 'w') as f:
        json.dump(self._times, f, indent=2)
    except (IOError, TypeError):
      try:
        platform_utils.remove(self._path)
      except OSError:
        pass


# This is a replacement for xmlrpc.client.Transport using urllib2
# and supporting persistent-http[s]. It cannot change hosts from
# request to request like the normal transport; the real URL
# is passed during initialization.
class PersistentTransport(xmlrpc.client.Transport):
  def __init__(self, orig_host):
    self.orig_host = orig_host

  def request(self, host, handler, request_body, verbose=False):
    with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy):
      # Python doesn't understand cookies with the #HttpOnly_ prefix.
      # Since we're only using them for HTTP, copy the file temporarily,
      # stripping those prefixes away.
      if cookiefile:
        # Text mode so str writes work under Python 3 as well.
        tmpcookiefile = tempfile.NamedTemporaryFile(mode='w')
        tmpcookiefile.write("# HTTP Cookie File\n")
        try:
          with open(cookiefile) as f:
            for line in f:
              if line.startswith("#HttpOnly_"):
                line = line[len("#HttpOnly_"):]
              tmpcookiefile.write(line)
          tmpcookiefile.flush()

          cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
          try:
            cookiejar.load()
          except cookielib.LoadError:
            cookiejar = cookielib.CookieJar()
        finally:
          tmpcookiefile.close()
      else:
        cookiejar = cookielib.CookieJar()

      proxyhandler = urllib.request.ProxyHandler
      if proxy:
        proxyhandler = urllib.request.ProxyHandler({
            "http": proxy,
            "https": proxy})

      opener = urllib.request.build_opener(
          urllib.request.HTTPCookieProcessor(cookiejar),
          proxyhandler)

      url = urllib.parse.urljoin(self.orig_host, handler)
      parse_results = urllib.parse.urlparse(url)

      scheme = parse_results.scheme
      if scheme == 'persistent-http':
        scheme = 'http'
      if scheme == 'persistent-https':
        # If we're proxying through persistent-https, use http. The
        # proxy itself will do the https.
        if proxy:
          scheme = 'http'
        else:
          scheme = 'https'

      # Parse out any authentication information using the base class.
      host, extra_headers, _ = self.get_host_info(parse_results.netloc)

      url = urllib.parse.urlunparse((
          scheme,
          host,
          parse_results.path,
          parse_results.params,
          parse_results.query,
          parse_results.fragment))

      request = urllib.request.Request(url, request_body)
      if extra_headers is not None:
        for (name, header) in extra_headers:
          request.add_header(name, header)
      request.add_header('Content-Type', 'text/xml')
      try:
        response = opener.open(request)
      except urllib.error.HTTPError as e:
        if e.code == 501:
          # We may have been redirected through a login process
          # but our POST turned into a GET. Retry.
          response = opener.open(request)
        else:
          raise

      p, u = xmlrpc.client.getparser()
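      # Stream the response through the XML parser in 1 KiB chunks; the
      # unmarshaller's close() returns the parsed RPC result.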
      while True:
        data = response.read(1024)
        if not data:
          break
        p.feed(data)
      p.close()
      return u.close()

  def close(self):
    pass