sync: Add support to dump a JSON event log of all sync events.
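
The event timeline can be dumped by passing the new --event-log global
option to repo; for example (path illustrative):

  repo --event-log=/tmp/repo_sync_events.json sync

Each line of the written file is a single JSON object. An abridged,
illustrative sync-network entry (keys are sorted; optional per-project
keys such as project and revision are omitted here) looks like:

  {"finish_time": 1500000012.3, "id": ["RepoOp", 2], "name": "build/make",
   "parent": ["RepoOp", 1], "start_time": 1500000000.0, "status": "pass",
   "task_name": "sync-network", "try": 1}
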
Change-Id: Id4852968ac1b2bf0093007cf2e5ca951ddab8b3b
diff --git a/command.py b/command.py
index 2ff0a34..971f968 100644
--- a/command.py
+++ b/command.py
@@ -19,6 +19,7 @@
import re
import sys
+from event_log import EventLog
from error import NoSuchProjectError
from error import InvalidProjectGroupsError
@@ -28,6 +29,7 @@
"""
common = False
+ event_log = EventLog()
manifest = None
_optparse = None
diff --git a/event_log.py b/event_log.py
new file mode 100644
index 0000000..d73511d
--- /dev/null
+++ b/event_log.py
@@ -0,0 +1,177 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import json
+import multiprocessing
+
+TASK_COMMAND = 'command'
+TASK_SYNC_NETWORK = 'sync-network'
+TASK_SYNC_LOCAL = 'sync-local'
+
+class EventLog(object):
+ """Event log that records events that occurred during a repo invocation.
+
+ Events are written to the log as consecutive JSON entries, one per line.
+ Each entry contains the following keys:
+ - id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
+ The ID is only unique for the invocation of the repo command.
+ - name: Name of the object being operated upon.
+ - task_name: The task that was performed.
+ - start_time: Timestamp of when the operation started.
+ - finish_time: Timestamp of when the operation finished.
+ - status: 'pass' if the operation was successful, 'fail' otherwise.
+ - try: A counter indicating the try count of this task.
+
+ Optionally:
+ - parent: A ('RepoOp', ID) tuple indicating the parent event for nested
+ events.
+
+ Valid task_names include:
+ - command: The invocation of a subcommand.
+ - sync-network: The network component of a sync command.
+ - sync-local: The local component of a sync command.
+
+ Specific tasks may include additional informational properties.
+ """
+
+ def __init__(self):
+ """Initializes the event log."""
+ self._log = []
+ self._next_id = _EventIdGenerator()
+ self._parent = None
+
+ def Add(self, name, task_name, start, finish=None, success=None,
+ try_count=1, kind='RepoOp'):
+ """Add an event to the log.
+
+ Args:
+ name: Name of the object being operated upon.
+ task_name: A sub-task that was performed for name.
+ start: Timestamp of when the operation started.
+ finish: Timestamp of when the operation finished.
+ success: Boolean indicating if the operation was successful.
+ try_count: A counter indicating the try count of this task.
+ kind: The kind of the object for the unique identifier.
+
+ Returns:
+ A dictionary of the event added to the log.
+ """
+ event = {
+ 'id': (kind, next(self._next_id)),
+ 'name': name,
+ 'task_name': task_name,
+ 'start_time': start,
+ 'try': try_count,
+ }
+
+ if self._parent:
+ event['parent'] = self._parent['id']
+
+ if success is not None or finish is not None:
+ self.FinishEvent(event, finish, success)
+
+ self._log.append(event)
+ return event
+
+ def AddSync(self, project, task_name, start, finish, success):
+ """Add a event to the log for a sync command.
+
+ Args:
+ project: Project being synced.
+ task_name: The sync sub-task that was performed for the project.
+ One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL).
+ start: Timestamp of when the operation started.
+ finish: Timestamp of when the operation finished.
+ success: Boolean indicating if the operation was successful.
+
+ Returns:
+ A dictionary of the event added to the log.
+ """
+ event = self.Add(project.relpath, task_name, start, finish, success)
+ if event is not None:
+ event['project'] = project.name
+ if project.revisionExpr:
+ event['revision'] = project.revisionExpr
+ if project.remote.url:
+ event['project_url'] = project.remote.url
+ if project.remote.fetchUrl:
+ event['remote_url'] = project.remote.fetchUrl
+ try:
+ event['git_hash'] = project.GetCommitRevisionId()
+ except Exception:
+ pass
+ return event
+
+ def GetStatusString(self, success):
+ """Converst a boolean success to a status string.
+
+ Args:
+ success: Boolean indicating if the operation was successful.
+
+ Returns:
+ A status string: 'pass' if success is True, otherwise 'fail'.
+ """
+ return 'pass' if success else 'fail'
+
+ def FinishEvent(self, event, finish, success):
+ """Finishes an incomplete event.
+
+ Args:
+ event: An event that has been added to the log.
+ finish: Timestamp of when the operation finished.
+ success: Boolean indicating if the operation was successful.
+
+ Returns:
+ The event dictionary, updated with 'status' and 'finish_time'.
+ """
+ event['status'] = self.GetStatusString(success)
+ event['finish_time'] = finish
+ return event
+
+ def SetParent(self, event):
+ """Set a parent event for all new entities.
+
+ Args:
+ event: The event to use as a parent.
+ """
+ self._parent = event
+
+ def Write(self, filename):
+ """Writes the log out to a file.
+
+ Args:
+ filename: The file to write the log to.
+ """
+ with open(filename, 'w+') as f:
+ for e in self._log:
+ json.dump(e, f, sort_keys=True)
+ f.write('\n')
+
+
+def _EventIdGenerator():
+ """Returns multi-process safe iterator that generates locally unique id.
+
+ Yields:
+ An integer id that is unique within this invocation of the program.
+ """
+ eid = multiprocessing.Value('i', 1)
+
+ while True:
+ with eid.get_lock():
+ val = eid.value
+ eid.value += 1
+ yield val
diff --git a/main.py b/main.py
index f965d7e..386b4f1 100755
--- a/main.py
+++ b/main.py
@@ -37,6 +37,7 @@
kerberos = None
from color import SetDefaultColoring
+import event_log
from trace import SetTrace
from git_command import git, GitCommand
from git_config import init_ssh, close_ssh
@@ -85,6 +86,9 @@
global_options.add_option('--version',
dest='show_version', action='store_true',
help='display this version of repo')
+global_options.add_option('--event-log',
+ dest='event_log', action='store',
+ help='filename to write the event timeline log to')
class _Repo(object):
def __init__(self, repodir):
@@ -176,6 +180,8 @@
RunPager(config)
start = time.time()
+ cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
+ cmd.event_log.SetParent(cmd_event)
try:
result = cmd.Execute(copts, cargs)
except (DownloadError, ManifestInvalidRevisionError,
@@ -203,7 +209,8 @@
result = e.code
raise
finally:
- elapsed = time.time() - start
+ finish = time.time()
+ elapsed = finish - start
hours, remainder = divmod(elapsed, 3600)
minutes, seconds = divmod(remainder, 60)
if gopts.time:
@@ -213,6 +220,12 @@
print('real\t%dh%dm%.3fs' % (hours, minutes, seconds),
file=sys.stderr)
+ cmd.event_log.FinishEvent(cmd_event, finish,
+ result is None or result == 0)
+ if gopts.event_log:
+ cmd.event_log.Write(os.path.abspath(
+ os.path.expanduser(gopts.event_log)))
+
return result
diff --git a/manifest_xml.py b/manifest_xml.py
index 4f0eb64..55d25a7 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -110,7 +110,8 @@
return url
def ToRemoteSpec(self, projectName):
- url = self.resolvedFetchUrl.rstrip('/') + '/' + projectName
+ fetchUrl = self.resolvedFetchUrl.rstrip('/')
+ url = fetchUrl + '/' + projectName
remoteName = self.name
if self.remoteAlias:
remoteName = self.remoteAlias
@@ -118,7 +119,8 @@
url=url,
pushUrl=self.pushUrl,
review=self.reviewUrl,
- orig_name=self.name)
+ orig_name=self.name,
+ fetchUrl=self.fetchUrl)
class XmlManifest(object):
"""manages the repo configuration file"""
diff --git a/project.py b/project.py
index 45ecfe3..269fd7e 100644
--- a/project.py
+++ b/project.py
@@ -323,13 +323,15 @@
pushUrl=None,
review=None,
revision=None,
- orig_name=None):
+ orig_name=None,
+ fetchUrl=None):
self.name = name
self.url = url
self.pushUrl = pushUrl
self.review = review
self.revision = revision
self.orig_name = orig_name
+ self.fetchUrl = fetchUrl
class RepoHook(object):
@@ -2876,13 +2878,14 @@
self.detach_head = detach_head
self.clean = True
+ self.recent_clean = True
def info(self, project, fmt, *args):
self._messages.append(_InfoMessage(project, fmt % args))
def fail(self, project, err=None):
self._failures.append(_Failure(project, err))
- self.clean = False
+ self._MarkUnclean()
def later1(self, project, what):
self._later_queue1.append(_Later(project, what))
@@ -2896,6 +2899,15 @@
self._PrintMessages()
return self.clean
+ def Recently(self):
+ recent_clean = self.recent_clean
+ self.recent_clean = True
+ return recent_clean
+
+ def _MarkUnclean(self):
+ self.clean = False
+ self.recent_clean = False
+
def _RunLater(self):
for q in ['_later_queue1', '_later_queue2']:
if not self._RunQueue(q):
@@ -2904,7 +2916,7 @@
def _RunQueue(self, queue):
for m in getattr(self, queue):
if not m.Run(self):
- self.clean = False
+ self._MarkUnclean()
return False
setattr(self, queue, [])
return True
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 82056f3..ef02327 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -64,6 +64,7 @@
except ImportError:
multiprocessing = None
+import event_log
from git_command import GIT, git_require
from git_config import GetUrlCookieFile
from git_refs import R_HEADS, HEAD
@@ -304,9 +305,10 @@
# - We always set err_event in the case of an exception.
# - We always make sure we call sem.release().
# - We always make sure we unlock the lock if we locked it.
+ start = time.time()
+ success = False
try:
try:
- start = time.time()
success = project.Sync_NetworkHalf(
quiet=opt.quiet,
current_branch_only=opt.current_branch_only,
@@ -345,6 +347,9 @@
finally:
if did_lock:
lock.release()
+ finish = time.time()
+ self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
+ start, finish, success)
return success
@@ -720,16 +725,24 @@
_PostRepoUpgrade(self.manifest, quiet=opt.quiet)
if not opt.local_only:
- mp.Sync_NetworkHalf(quiet=opt.quiet,
- current_branch_only=opt.current_branch_only,
- no_tags=opt.no_tags,
- optimized_fetch=opt.optimized_fetch,
- submodules=self.manifest.HasSubmodules)
+ start = time.time()
+ success = mp.Sync_NetworkHalf(quiet=opt.quiet,
+ current_branch_only=opt.current_branch_only,
+ no_tags=opt.no_tags,
+ optimized_fetch=opt.optimized_fetch,
+ submodules=self.manifest.HasSubmodules)
+ finish = time.time()
+ self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK,
+ start, finish, success)
if mp.HasChanges:
syncbuf = SyncBuffer(mp.config)
+ start = time.time()
mp.Sync_LocalHalf(syncbuf, submodules=self.manifest.HasSubmodules)
- if not syncbuf.Finish():
+ clean = syncbuf.Finish()
+ self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL,
+ start, time.time(), clean)
+ if not clean:
sys.exit(1)
self._ReloadManifest(manifest_name)
if opt.jobs is None:
@@ -823,7 +836,10 @@
for project in all_projects:
pm.update()
if project.worktree:
+ start = time.time()
project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
+ self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
+ start, time.time(), syncbuf.Recently())
pm.end()
print(file=sys.stderr)
if not syncbuf.Finish():
@@ -907,6 +923,7 @@
return False
return True
+
class _FetchTimes(object):
_ALPHA = 0.5