changeset 0:3ddb9df55b5d
child     1:cb057921cd8c
Anton Shestakov <engored@ya.ru>, Sat, 02 May 2015 01:47:27 +0800
update and deployment scripts

5 files changed, 210 insertions(+), 0 deletions(-)
.hgignore
bench.py
deploy.sh
settings.py
update.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.hgignore Sat May 02 01:47:27 2015 +0800
@@ -0,0 +1,5 @@
+syntax: glob
+
+*.pyc
+data/
+venv/
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/bench.py Sat May 02 01:47:27 2015 +0800
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+import errno
+import logging
+import os
+import shutil
+import sqlite3
+import subprocess
+import time
+from argparse import ArgumentParser
+from collections import OrderedDict
+
+from settings import DBPATH, HG, LOCKFILE, TESTHGREPO, TESTREPO, rel
+
+
+MARKS = OrderedDict((
+    ('blame', 'hg blame README'),
+    ('diff', 'hg diff -r "tip~100:tip" README'),
+    ('diffg', 'hg diff -r "tip~100:tip" --git README'),
+    ('stcp', 'hg status --copies README'),
+    ('logfile', 'hg log README'),
+    ('logfilecp', 'hg log --copies README'),
+    ('log1', 'hg log -l1'),
+    ('log1000', 'hg log -l1000')
+))
+
+
+parser = ArgumentParser(description='Benchmark revisions and put results in the db.')
+parser.add_argument('revsets', metavar='REVSET', default=['last(all(), 120)'], nargs='*', help='benchmark these revisions')
+parser.add_argument('--retry', action='store_true', help='try and reduce existing timings')
+
+
+def getnodes(revsets):
+    cmd = [HG, 'log', '-R', TESTHGREPO, '-T', '{node}\n']
+    for revset in revsets:
+        cmd += ['-r', revset]
+    output = subprocess.check_output(cmd)
+    return output.split()
+
+
+def test(mark, mintime=1.0, mintries=3, dropcache=True):
+    results = []
+
+    cmd = [rel(TESTHGREPO, 'hg'), '-R', TESTREPO]
+
+    if mark == 'blame':
+        cmd += ['blame', rel(TESTREPO, 'README')]
+    elif mark == 'stcp':
+        cmd += ['status', '--copies', rel(TESTREPO, 'README')]
+    elif mark == 'diff':
+        cmd += ['diff', '-r', 'tip~100:tip', rel(TESTREPO, 'README')]
+    elif mark == 'diffg':
+        cmd += ['diff', '-r', 'tip~100:tip', '--git', rel(TESTREPO, 'README')]
+    elif mark == 'logfile':
+        cmd += ['log', rel(TESTREPO, 'README')]
+    elif mark == 'logfilecp':
+        cmd += ['log', '--copies', rel(TESTREPO, 'README')]
+    elif mark == 'log1':
+        cmd += ['log', '-l1']
+    elif mark == 'log1000':
+        cmd += ['log', '-l1000']
+
+    while sum(results) < mintime and len(results) < mintries:
+        if dropcache:
+            shutil.rmtree(rel(TESTREPO, '.hg', 'cache'), ignore_errors=True)
+        start = time.time()
+        try:
+            subprocess.check_output(cmd)
+        except subprocess.CalledProcessError:
+            return None
+        results.append(time.time() - start)
+
+    return min(results)
+
+
+def makeclean():
+    subprocess.check_output(['make', '--directory', TESTHGREPO, 'clean'], stderr=subprocess.STDOUT)
+
+
+def makelocal(node):
+    subprocess.check_output([HG, 'update', '-R', TESTHGREPO, '--clean', node], stderr=subprocess.STDOUT)
+    subprocess.check_output(['make', '--directory', TESTHGREPO, 'local'], stderr=subprocess.STDOUT)
+
+
+def dbinit():
+    conn = sqlite3.connect(DBPATH)
+    conn.execute(
+        'CREATE TABLE IF NOT EXISTS results ('
+        ' node CHAR(40) NOT NULL,'
+        ' mark VARCHAR(40) NOT NULL,'
+        ' time FLOAT NOT NULL,'
+        ' cache BOOL NOT NULL'
+        ')')
+    conn.execute(
+        'CREATE INDEX IF NOT EXISTS idx_results_node ON results (node)')
+    conn.commit()
+    conn.close()
+
+
+def dbupdate(revsets, retry=False):
+    conn = sqlite3.connect(DBPATH)
+    makeclean()
+    nodes = getnodes(revsets)
+
+    for i, node in enumerate(nodes, 1):
+        madelocal = False
+
+        for mark in MARKS:
+            for cache in (False, True):
+                old = conn.execute(
+                    'SELECT time FROM results WHERE node = ? AND mark = ? AND cache = ?',
+                    (node, mark, cache)).fetchall()
+                oldtime = old[0][0] if old else None
+
+                if oldtime is not None and not retry:
+                    continue
+
+                if not madelocal:
+                    makelocal(node)
+                    madelocal = True
+
+                newtime = test(mark, dropcache=not cache)
+                logging.info('%05d/%05d %s %s %s', i, len(nodes), node, mark, newtime)
+
+                if newtime is None:
+                    continue
+
+                if oldtime is None:
+                    conn.execute(
+                        'INSERT INTO results (node, mark, time, cache) VALUES (?, ?, ?, ?)',
+                        (node, mark, newtime, cache))
+                elif newtime < oldtime:
+                    conn.execute(
+                        'UPDATE results SET time = ? WHERE node = ? AND mark = ? AND cache = ?',
+                        (newtime, node, mark, cache))
+
+        conn.commit()
+
+    conn.close()
+
+
+def lock():
+    flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
+    try:
+        return os.fdopen(os.open(LOCKFILE, flags), 'w')
+    except OSError as e:
+        if e.errno == errno.EEXIST:
+            logging.error('cannot lock data directory')
+            raise
+        else:
+            raise
+
+
+def unlock():
+    os.remove(LOCKFILE)
+
+
+def main(revsets, retry=False):
+    lock()
+    try:
+        dbinit()
+        dbupdate(revsets, retry)
+    finally:
+        unlock()
+
+
+if __name__ == '__main__':
+    logging.getLogger().setLevel(logging.INFO)
+    args = parser.parse_args()
+    main(args.revsets, args.retry)
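
Each row in the results table is the best time seen so far for a (node, mark, cache) combination, where cache=0 means the test repository's .hg/cache directory was wiped before every run. A minimal sketch of reading the timings back out of data/db.sqlite, assuming only the schema created by dbinit() above (this snippet is not part of the changeset):

    import sqlite3

    from settings import DBPATH

    conn = sqlite3.connect(DBPATH)
    # best cold-cache time recorded for every (node, mark) pair
    rows = conn.execute(
        'SELECT node, mark, time FROM results WHERE cache = ? ORDER BY mark, node',
        (False,))
    for node, mark, best in rows:
        print('%s  %-10s  %.3fs' % (node[:12], mark, best))
    conn.close()
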
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/deploy.sh Sat May 02 01:47:27 2015 +0800
@@ -0,0 +1,15 @@
+#!/bin/sh
+set -x
+
+mkdir -p ./data
+
+export HGPLAIN=1
+
+HGURL='http://selenic.com/hg'
+TESTURL='http://selenic.com/hg'
+
+hg clone "$HGURL" ./data/testhg
+hg archive -R ./data/testhg ./data/newesthg
+make --directory ./data/newesthg local
+
+hg clone "$TESTURL" -r 3.3.3 ./data/testrepo
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/settings.py Sat May 02 01:47:27 2015 +0800
@@ -0,0 +1,14 @@
+import os
+
+
+os.environ['HGPLAIN'] = '1'
+
+
+rel = lambda *x: os.path.abspath(os.path.join(os.path.dirname(__file__), *x))
+
+
+DBPATH = rel('data', 'db.sqlite')
+HG = rel('data', 'newesthg', 'hg')
+LOCKFILE = rel('data', 'bench.lock')
+TESTHGREPO = rel('data', 'testhg')
+TESTREPO = rel('data', 'testrepo')
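
Importing settings sets HGPLAIN=1 so Mercurial's output stays stable for parsing, and rel() anchors every path to the directory containing settings.py, so the scripts work from any working directory. A quick interpreter check (illustrative only):

    >>> import os
    >>> from settings import rel, DBPATH
    >>> os.path.isabs(DBPATH)
    True
    >>> rel('data', 'db.sqlite') == DBPATH
    True
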
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/update.sh Sat May 02 01:47:27 2015 +0800
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+export HGPLAIN=1
+
+nodes=`./data/newesthg/hg incoming -R ./data/testhg --quiet -T '{node}\n'`
+./data/newesthg/hg pull -R ./data/testhg
+./bench.py $nodes
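
update.sh records the incoming nodes before pulling, so that only the changesets that actually arrive with the pull get benchmarked, then hands that list to bench.py. A rough Python equivalent of the same flow, shown only to spell out that ordering (not part of the changeset; assumes bench.py sits next to settings.py, as in this repository):

    import subprocess
    import sys

    from settings import HG, TESTHGREPO, rel

    # capture the incoming nodes first, so we know exactly which changesets the pull adds
    try:
        out = subprocess.check_output(
            [HG, 'incoming', '-R', TESTHGREPO, '--quiet', '-T', '{node}\n'])
    except subprocess.CalledProcessError:
        out = b''  # hg incoming exits with status 1 when there is nothing new
    nodes = out.decode('ascii').split()

    subprocess.check_output([HG, 'pull', '-R', TESTHGREPO])
    if nodes:
        subprocess.check_call([sys.executable, rel('bench.py')] + nodes)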