[PATCH 1/1] gitdm: convert to Python3
Heinrich Schuchardt
heinrich.schuchardt at canonical.com
Sat Jul 16 09:43:15 CEST 2022
Python 2 is beyond its end of life, so convert the scripts to Python 3.
gitdm itself works fine; the other scripts may need further testing.
Signed-off-by: Heinrich Schuchardt <heinrich.schuchardt at canonical.com>
---
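Note on the recurring print() conversions below: a Python 2 print statement
with a trailing comma suppresses the newline, so the Python 3 form needs an
explicit end='' (and flush where the old code relied on unbuffered output).
A minimal sketch with illustrative values, not taken from the scripts:

    import sys

    nc, commit = 25, '0123abcdef'          # illustrative values only
    # Python 2: print '%6d %s \r' % (nc, commit[:8]),
    print('%6d %s \r' % (nc, commit[:8]), end='')
    sys.stdout.flush()
    # Python 2: print >> sys.stderr, 'Grabbing changesets...\r',
    print('Grabbing changesets...\r', end='', file=sys.stderr, flush=True)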
 ConfigFile.py |  4 +--
 changelogs    | 49 +++++++++++++-------------
 committags    | 10 +++---
 csvdump.py    |  2 +-
 database.py   | 10 +++---
 findoldfiles  |  2 +-
 firstlast     | 44 +++++++++++------------
 gitdm         | 21 ++++++-----
 gitlog.py     | 12 +++----
 grabdates     | 11 +++---
 inittags      |  8 ++---
 linetags      | 17 ++++-----
 logparser.py  | 11 +++---
 reports.py    | 98 ++++++++++++++++++++++++---------------------------
 stablefixes   |  8 ++---
 treeplot      | 27 +++++++-------
 16 files changed, 167 insertions(+), 167 deletions(-)
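The cmp-style list.sort() calls are converted to sorted() with a key function
that returns the count (with reverse=True) or its negation, which keeps the
descending order; functools.cmp_to_key would be an equivalent alternative.
A minimal sketch with an illustrative counts dict, not taken from the scripts:

    import functools

    counts = {'alice': 3, 'bob': 7}        # illustrative data only
    # Python 2: names = counts.keys(); names.sort(lambda a, b: counts[b] - counts[a])
    names = sorted(counts, key=lambda name: counts[name], reverse=True)
    # Equivalent, reusing the old comparison function unchanged:
    names2 = sorted(counts, key=functools.cmp_to_key(lambda a, b: counts[b] - counts[a]))
    assert names == names2 == ['bob', 'alice']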
diff --git a/ConfigFile.py b/ConfigFile.py
index 3a1e208..559e6d0 100644
--- a/ConfigFile.py
+++ b/ConfigFile.py
@@ -147,10 +147,10 @@ def ReadFileType (filename):
m = regex_file_type.match (line)
if not m or len (m.groups ()) != 2:
ConfigFile.croak ('Funky file type line "%s"' % (line))
- if not patterns.has_key (m.group (1)):
+ if not m.group (1) in patterns:
patterns[m.group (1)] = []
if m.group (1) not in order:
- print '%s not found, appended to the last order' % m.group (1)
+ print('%s not found, appended to the last order' % m.group (1))
order.append (m.group (1))
patterns[m.group (1)].append (re.compile (m.group (2), re.IGNORECASE))
diff --git a/changelogs b/changelogs
index 1077a03..0ca2bb9 100755
--- a/changelogs
+++ b/changelogs
@@ -1,4 +1,4 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
# -*- python -*-
#
# Go munging through changelogs for interesting info.
@@ -34,15 +34,14 @@ def LogNoSOBCulprit(culprit):
NoSOBCulprits[culprit] = 1
def SortedCulprits(culprits):
- def compare(c1, c2):
- return culprits[c2] - culprits[c1]
- names = culprits.keys()
- names.sort(compare)
+ def compare(key):
+ return culprits[key]
+ names = sorted(culprits.keys(), key=compare, reverse=True)
return names
-def PrintCulprits(culprits, sorted):
- for name in sorted:
- print '\t%30s: %d' % (name, culprits[name])
+def PrintCulprits(culprits, _sorted):
+ for name in _sorted:
+ print('\t%30s: %d' % (name, culprits[name]))
#
# Patch logging
#
@@ -112,20 +111,20 @@ while p:
Nothers += 1
p = gitlog.grabpatch(sys.stdin)
-print '%d patches, %d w/o changelog' % (Npatches, NemptyCL)
-print ' %d w/o signoff, %d w/1 signoff, %d no others, %d SS culprits' % \
- (Nnosob, Nsinglesob, Nsinglesob - Nothers, len(SingleSSCulprits))
-print '\nMost single signoffs:'
-sorted = SortedCulprits(SingleSSCulprits)[:20]
-PrintCulprits(SingleSSCulprits, sorted)
-WritePatches(sorted, SSPatches, 'sspatches.html')
-
-print '\nMost empty changelogs:'
-sorted = SortedCulprits(EmptyCulprits)[:20]
-PrintCulprits(EmptyCulprits, sorted)
-WritePatches(sorted, EmptyCLPatches, 'emptypatches.html')
-
-print '\nNoSOB:'
-sorted = SortedCulprits(NoSOBCulprits)
-PrintCulprits(NoSOBCulprits, sorted)
-WritePatches(sorted, NoSOBPatches, 'nosobpatches.html')
+print('%d patches, %d w/o changelog' % (Npatches, NemptyCL))
+print(' %d w/o signoff, %d w/1 signoff, %d no others, %d SS culprits' % \
+ (Nnosob, Nsinglesob, Nsinglesob - Nothers, len(SingleSSCulprits)))
+print('\nMost single signoffs:')
+_sorted = SortedCulprits(SingleSSCulprits)[:20]
+PrintCulprits(SingleSSCulprits, _sorted)
+WritePatches(_sorted, SSPatches, 'sspatches.html')
+
+print('\nMost empty changelogs:')
+_sorted = SortedCulprits(EmptyCulprits)[:20]
+PrintCulprits(EmptyCulprits, _sorted)
+WritePatches(_sorted, EmptyCLPatches, 'emptypatches.html')
+
+print('\nNoSOB:')
+_sorted = SortedCulprits(NoSOBCulprits)
+PrintCulprits(NoSOBCulprits, _sorted)
+WritePatches(_sorted, NoSOBPatches, 'nosobpatches.html')
diff --git a/committags b/committags
index d015237..b96108c 100755
--- a/committags
+++ b/committags
@@ -1,4 +1,4 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
#
# Generate a database of commits and major versions they went into.
#
@@ -55,10 +55,10 @@ args = p.parse_args()
# Pull in an existing database if requested.
#
if args.load:
- DB = pickle.load(open(args.database, 'r'))
+ DB = pickle.load(open(args.database, 'rb'))
else:
DB = { }
-out = open(args.database, 'w')
+out = open(args.database, 'wb')
#
# Time to fire up git.
@@ -93,9 +93,9 @@ for line in input.readlines():
#
nc += 1
if (nc % 25) == 0:
- print '%6d %s %s \r' % (nc, commit[:8], tag),
+ print('%6d %s %s \r' % (nc, commit[:8], tag), end='')
sys.stdout.flush()
-print '\nFound %d/%d commits' % (nc, len(DB.keys()))
+print('\nFound %d/%d commits' % (nc, len(DB.keys())))
pickle.dump(DB, out)
out.close()
diff --git a/csvdump.py b/csvdump.py
index 9d1a65e..c3aec3b 100644
--- a/csvdump.py
+++ b/csvdump.py
@@ -50,7 +50,7 @@ def store_patch(patch):
ChangeSets.append([patch.commit, str(patch.date),
patch.email, domain, author, employer,
patch.added, patch.removed])
- for (filetype, (added, removed)) in patch.filetypes.iteritems():
+ for (filetype, (added, removed)) in patch.filetypes.items():
FileTypes.append([patch.commit, filetype, added, removed])
diff --git a/database.py b/database.py
index bf13227..232049e 100644
--- a/database.py
+++ b/database.py
@@ -40,7 +40,7 @@ class Hacker:
for edate, empl in self.employer[i]:
if edate > date:
return empl
- print 'OOPS. ', self.name, self.employer, self.email, email, date
+ print('OOPS. ', self.name, self.employer, self.email, email, date)
return None # Should not happen
def addpatch (self, patch):
@@ -216,7 +216,7 @@ class VirtualEmployer (Employer):
def store (self):
if Employers.has_key (self.name):
- print Employers[self.name]
+ print(Employers[self.name])
sys.stderr.write ('WARNING: Virtual empl %s overwrites another\n'
% (self.name))
if len (self.splits) == 0:
@@ -261,7 +261,7 @@ def MixVirtuals ():
EmailAliases = { }
def AddEmailAlias (variant, canonical):
- if EmailAliases.has_key (variant):
+ if variant in EmailAliases:
sys.stderr.write ('Duplicate email alias for %s\n' % (variant))
EmailAliases[variant] = canonical
@@ -288,7 +288,7 @@ def AddEmailEmployerMapping (email, employer, end = nextyear):
for i in range (0, len(l)):
date, xempl = l[i]
if date == end: # probably both nextyear
- print 'WARNING: duplicate email/empl for %s' % (email)
+ print('WARNING: duplicate email/empl for %s' % (email))
if date > end:
l.insert (i, (end, empl))
return
@@ -305,7 +305,7 @@ def MapToEmployer (email, unknown = 0):
pass
namedom = email.split ('@')
if len (namedom) < 2:
- print 'Oops...funky email %s' % email
+ print('Oops...funky email %s' % email)
return [(nextyear, GetEmployer ('Funky'))]
s = namedom[1].split ('.')
for dots in range (len (s) - 2, -1, -1):
diff --git a/findoldfiles b/findoldfiles
index 493d5d3..c36dd8a 100755
--- a/findoldfiles
+++ b/findoldfiles
@@ -19,7 +19,7 @@ def CheckFile(file):
git = os.popen('git log --pretty=oneline -1 ' + file, 'r')
line = git.readline()
if line.startswith(OriginalSin):
- print file
+ print(file)
git.close()
#
# Here we just plow through all the files.
diff --git a/firstlast b/firstlast
index 2b07952..09e60e8 100755
--- a/firstlast
+++ b/firstlast
@@ -1,4 +1,4 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
# -*- python -*-
#
# Crank through the log looking at when developers did their first and
@@ -52,15 +52,14 @@ def TrackFirstDirs(patch):
except KeyError:
FirstDirs[dir] = 1
-def cmpdirs(d1, d2):
- return FirstDirs[d2] - FirstDirs[d1]
+def cmpdirs(d1):
+ return FirstDirs[d1]
def PrintFirstDirs():
- print '\nDirectories touched by first commits:'
- dirs = FirstDirs.keys()
- dirs.sort(cmpdirs)
+ print('\nDirectories touched by first commits:')
+ dirs = sorted(FirstDirs.keys(), key=cmpdirs, reverse=True)
for dir in dirs[:20]:
- print '%5d: %s' % (FirstDirs[dir], dir)
+ print('%5d: %s' % (FirstDirs[dir], dir))
#
# Let's also track who they worked for.
@@ -73,22 +72,21 @@ def TrackFirstEmpl(name):
except KeyError:
FirstEmpls[name] = 1
-def cmpempls(e1, e2):
- return FirstEmpls[e2] - FirstEmpls[e1]
+def cmpempls(e1):
+ return - FirstEmpls[e1]
def PrintFirstEmpls():
- empls = FirstEmpls.keys()
- empls.sort(cmpempls)
- print '\nEmployers:'
+ empls = sorted(FirstEmpls.keys(), key=cmpempls)
+ print('\nEmployers:')
for e in empls[:30]:
- print '%5d: %s' % (FirstEmpls[e], e)
+ print('%5d: %s' % (FirstEmpls[e], e))
#
# We "know" that unknown/none are always the top two...
#
companies = 0
for e in empls[2:]:
companies += FirstEmpls[e]
- print 'Companies: %d' % (companies)
+ print('Companies: %d' % (companies))
#
# Version comparison stuff. Kernel-specific, obviously.
@@ -126,7 +124,7 @@ def TrackingVersion(vers):
# Main program.
#
args = SetupArgs()
-VDB = pickle.load(open(args.versiondb, 'r'))
+VDB = pickle.load(open(args.versiondb, 'rb'))
ConfigFile.ConfigFile(args.config, args.dbdir)
SetTrackingVersions(args)
@@ -142,7 +140,7 @@ while patch:
try:
v = VDB[patch.commit]
except KeyError:
- print 'Funky commit', patch.commit
+ print('Funky commit', patch.commit)
patch = gitlog.grabpatch(sys.stdin)
continue
#
@@ -174,21 +172,21 @@ for h in database.AllHackers():
try:
empl = h.emailemployer(p.email, p.date)
except AttributeError:
- print 'No email on ', p.commit
+ print('No email on ', p.commit)
continue
if empl.name == '(Unknown)':
- print 'UNK: %s %s' % (p.email, h.name)
+ print('UNK: %s %s' % (p.email, h.name))
TrackFirstEmpl(empl.name)
versions = Lasts.keys()
-def cmpvers(v1, v2):
- return versionmap(v1) - versionmap(v2) # reverse sort
-versions.sort(cmpvers)
+def cmpvers(v1):
+ return versionmap(v1)
+versions = sorted(versions, key=cmpvers)
for v in versions:
if args.minversions <= 1:
- print v, len(Firsts[v]), len(Lasts[v]), Singles[v]
+ print(v, len(Firsts[v]), len(Lasts[v]), Singles[v])
else:
- print v, len(Firsts.get(v, [])), len(Lasts.get(v, []))
+ print(v, len(Firsts.get(v, [])), len(Lasts.get(v, [])))
PrintFirstDirs()
PrintFirstEmpls()
diff --git a/gitdm b/gitdm
index 61318ad..e4b8b7b 100755
--- a/gitdm
+++ b/gitdm
@@ -1,4 +1,4 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
#-*- coding:utf-8 -*-
#
@@ -15,10 +15,12 @@
import database, csvdump, ConfigFile, reports
import getopt, datetime
-import os, re, sys, rfc822, string, os.path
+import os, re, sys, email.utils as rfc822, string, os.path
import logparser
from patterns import patterns
+sys.stdin.reconfigure(encoding='utf-8')
+
Today = datetime.date.today()
#
@@ -108,7 +110,7 @@ def ParseOpts():
elif opt[0] == '-p':
CSVPrefix = opt[1]
elif opt[0] == '-r':
- print 'Filter on "%s"' % (opt[1])
+ print('Filter on "%s"' % (opt[1]))
FileFilter = re.compile(opt[1])
elif opt[0] == '-s':
AuthorSOBs = 0
@@ -120,7 +122,7 @@ def ParseOpts():
ReportUnknowns = True
elif opt[0] == '-x':
CSVFile = open(opt[1], 'w')
- print "open output file " + opt[1] + "\n"
+ print("open output file " + opt[1] + "\n")
elif opt [0] == '-w':
Aggregate = 'week'
elif opt [0] == '-y':
@@ -172,7 +174,7 @@ DateMap = { }
def AddDateLines(date, lines):
if lines > 1000000:
- print 'Skip big patch (%d)' % lines
+ print('Skip big patch (%d)' % lines)
return
try:
DateMap[date] += lines
@@ -208,6 +210,7 @@ class patch:
self.reports = [ ]
self.filetypes = {}
self.files = [ ]
+ self.date = datetime.date(1970, 1, 1)
def addreviewer(self, reviewer):
self.reviews.append(reviewer)
@@ -389,7 +392,7 @@ def GripeAboutAuthorName(name):
if name in GripedAuthorNames:
return
GripedAuthorNames.append(name)
- print '%s is an author name, probably not what you want' % (name)
+ print('%s is an author name, probably not what you want' % (name))
def ApplyFileFilter(line, ignore):
#
@@ -462,14 +465,14 @@ TotalChanged = TotalAdded = TotalRemoved = 0
#
# Snarf changesets.
#
-print >> sys.stderr, 'Grabbing changesets...\r',
+print('Grabbing changesets...\r', end='', file=sys.stderr, flush=True)
patches = logparser.LogPatchSplitter(sys.stdin)
printcount = CSCount = 0
for logpatch in patches:
if (printcount % 50) == 0:
- print >> sys.stderr, 'Grabbing changesets...%d\r' % printcount,
+ print('Grabbing changesets...%d\r' % printcount, end='', file=sys.stderr, flush=True)
printcount += 1
# We want to ignore commits on svn tags since in Subversion
@@ -528,7 +531,7 @@ for logpatch in patches:
CSCount += 1
csvdump.AccumulatePatch(p, Aggregate)
csvdump.store_patch(p)
-print >> sys.stderr, 'Grabbing changesets...done '
+print('Grabbing changesets...done ', file=sys.stderr)
if DumpDB:
database.DumpDB()
diff --git a/gitlog.py b/gitlog.py
index 71efee1..44c9ffe 100644
--- a/gitlog.py
+++ b/gitlog.py
@@ -3,7 +3,7 @@
#
# Someday this will be the only version of grabpatch, honest.
#
-import re, rfc822, datetime
+import re, email.utils, datetime
from patterns import patterns
import database
@@ -61,7 +61,7 @@ S_DONE = 5
def get_header(patch, line, input):
if line == '':
if patch.author == '':
- print 'Funky auth line in', patch.commit
+ print('Funky auth line in', patch.commit)
patch.author = database.LookupStoreHacker('Unknown',
'unknown at hacker.net')
return S_DESC
@@ -72,13 +72,13 @@ def get_header(patch, line, input):
else:
m = patterns['date'].match(line)
if m:
- dt = rfc822.parsedate(m.group(2))
+ dt = email.utils.parsedate(m.group(2))
patch.date = datetime.date(dt[0], dt[1], dt[2])
return S_HEADER
def get_desc(patch, line, input):
if not line:
- print 'Missing desc in', patch.commit
+ print('Missing desc in', patch.commit)
return S_CHANGELOG
patch.desc = line
line = getline(input)
@@ -188,7 +188,7 @@ def grabpatch(input):
return None
m = patterns['commit'].match(line)
if not m:
- print 'noncommit', line
+ print('noncommit', line)
return None
p = patch(m.group(1))
state = S_HEADER
@@ -199,7 +199,7 @@ def grabpatch(input):
line = getline(input)
if line is None:
if state != S_NUMSTAT:
- print 'Ran out of patch', state
+ print('Ran out of patch', state)
return None
return p
state = grabbers[state](p, line, input)
diff --git a/grabdates b/grabdates
index 3155792..f67ab67 100755
--- a/grabdates
+++ b/grabdates
@@ -1,17 +1,18 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
# -*- python -*-
#
# git log | grep '^Date:' | grabdates
#
-import sys
+import io, sys
from utils import accumulator
tzs = accumulator()
+input_stream=io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8', errors='ignore')
-for line in sys.stdin.readlines():
+for line in input_stream.readlines():
split = line.split()
if split[0] != 'Date:':
- print 'Funky line: ', line
+ print('Funky line: ', line)
sys.exit(1)
zone = int(split[-1])
tzs.incr(zone)
@@ -19,4 +20,4 @@ for line in sys.stdin.readlines():
zones = tzs.keys()
zones.sort()
for zone in zones:
- print '%05d %d' % (zone, tzs[zone])
+ print('%05d %d' % (zone, tzs[zone]))
diff --git a/inittags b/inittags
index 3e82913..97261c2 100755
--- a/inittags
+++ b/inittags
@@ -1,4 +1,4 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
# -*- python -*-
#
# Generate a database of commits and major versions they went into.
@@ -49,10 +49,10 @@ args = p.parse_args()
# Pull in an existing database if requested.
#
if args.load:
- DB = pickle.load(open(args.database, 'r'))
+ DB = pickle.load(open(args.database, 'rb'))
else:
DB = { }
-out = open(args.database, 'w')
+out = open(args.database, 'wb')
if args.repository:
os.chdir(args.repository)
@@ -77,6 +77,6 @@ for v in range(1, final):
GetCommits('v4.%d..v4.%d' % (v - 1, v), 'v4.%d' % (v))
GetCommits('v4.%d..' % (final - 1), 'v4.%d' % (final))
-print '\nFound %d commits' % (len(DB.keys()))
+print('\nFound %d commits' % (len(DB.keys())))
pickle.dump(DB, out)
out.close()
diff --git a/linetags b/linetags
index 5106be2..f31327c 100755
--- a/linetags
+++ b/linetags
@@ -1,4 +1,4 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
#
# Find out how many lines were introduced in each major release.
#
@@ -12,16 +12,17 @@
# This file may be distributed under the terms of the GNU General
# Public License, version 2.
#
-import sys, re, os, pickle
+import io, sys, re, os, pickle
CommitLines = { }
commitpat = re.compile(r'^([\da-f][\da-f]+) ')
def GetCommitLines(file):
- print file
+ print(file)
blame = os.popen('git blame -p ' + file, 'r')
- for line in blame.readlines():
+ input_stream=io.TextIOWrapper(blame.buffer, encoding='utf-8', errors='ignore')
+ for line in input_stream.readlines():
m = commitpat.search(line)
#
# All-zero commits mean we got fed a file that git doesn't
@@ -44,12 +45,12 @@ def CommitToTag(commit):
try:
return DB[commit]
except KeyError:
- print 'Missing commit %s' % (commit)
+ print('Missing commit %s' % (commit))
return 'WTF?'
TagLines = { }
def MapCommits():
- print 'Mapping tags...'
+ print('Mapping tags...')
for commit in CommitLines.keys():
tag = CommitToTag(commit)
try:
@@ -66,7 +67,7 @@ if len(sys.argv) != 2:
#
# Grab the tags/version database.
#
-dbf = open('committags.db', 'r')
+dbf = open('committags.db', 'rb')
DB = pickle.load(dbf)
dbf.close()
@@ -79,7 +80,7 @@ for file in files.readlines():
MapCommits()
# print TagLines
tags = TagLines.keys()
-tags.sort()
+tags = sorted(tags)
for tag in tags:
out.write('%s %d\n' % (tag, TagLines[tag]))
out.close()
diff --git a/logparser.py b/logparser.py
index b375034..548390c 100644
--- a/logparser.py
+++ b/logparser.py
@@ -17,7 +17,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA
-import sys
+import io, sys
from patterns import patterns
class LogPatchSplitter:
@@ -35,13 +35,14 @@ class LogPatchSplitter:
def __init__(self, fd):
self.fd = fd
+ self.input_stream = io.TextIOWrapper(fd.buffer, encoding='utf-8', errors='ignore')
self.buffer = None
self.patch = []
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
patch = self.__grab_patch__()
if not patch:
raise StopIteration
@@ -76,7 +77,7 @@ class LogPatchSplitter:
patch.append(line)
self.buffer = None
- line = self.fd.readline()
+ line = self.input_stream.readline()
return patch
@@ -85,6 +86,6 @@ if __name__ == '__main__':
patches = LogPatchSplitter(sys.stdin)
for patch in patches:
- print '---------- NEW PATCH ----------'
+ print('---------- NEW PATCH ----------')
for line in patch:
- print line,
+ print(line, end='')
diff --git a/reports.py b/reports.py
index d7a96bc..9a2ab2f 100644
--- a/reports.py
+++ b/reports.py
@@ -69,11 +69,11 @@ def EndReport():
#
# Comparison and report generation functions.
#
-def ComparePCount(h1, h2):
- return len(h2.patches) - len(h1.patches)
+def ComparePCount(h1):
+ return - len(h1.patches)
def ReportByPCount(hlist, cscount):
- hlist.sort(ComparePCount)
+ hlist = sorted(hlist, key=ComparePCount)
count = 0
BeginReport('Developers with the most changesets')
for h in hlist:
@@ -87,11 +87,11 @@ def ReportByPCount(hlist, cscount):
break
EndReport()
-def CompareLChanged(h1, h2):
- return h2.changed - h1.changed
+def CompareLChanged(h1):
+ return - h1.changed
def ReportByLChanged(hlist, totalchanged):
- hlist.sort(CompareLChanged)
+ hlist = sorted(hlist, key=CompareLChanged)
count = 0
BeginReport('Developers with the most changed lines')
for h in hlist:
@@ -103,11 +103,11 @@ def ReportByLChanged(hlist, totalchanged):
break
EndReport()
-def CompareLRemoved(h1, h2):
- return (h2.removed - h2.added) - (h1.removed - h1.added)
+def CompareLRemoved(h1):
+ return h1.added - h1.removed
def ReportByLRemoved(hlist, totalremoved):
- hlist.sort(CompareLRemoved)
+ hlist = sorted(hlist, key=CompareLRemoved)
count = 0
BeginReport('Developers with the most lines removed')
for h in hlist:
@@ -121,11 +121,11 @@ def ReportByLRemoved(hlist, totalremoved):
break
EndReport()
-def CompareEPCount(e1, e2):
- return e2.count - e1.count
+def CompareEPCount(e1):
+ return - e1.count
def ReportByPCEmpl(elist, cscount):
- elist.sort(CompareEPCount)
+ elist = sorted(elist, key=CompareEPCount)
count = 0
BeginReport('Top changeset contributors by employer')
for e in elist:
@@ -137,11 +137,11 @@ def ReportByPCEmpl(elist, cscount):
EndReport()
-def CompareELChanged(e1, e2):
- return e2.changed - e1.changed
+def CompareELChanged(e1):
+ return - e1.changed
def ReportByELChanged(elist, totalchanged):
- elist.sort(CompareELChanged)
+ elist = sorted(elist, key=CompareELChanged)
count = 0
BeginReport('Top lines changed by employer')
for e in elist:
@@ -154,11 +154,11 @@ def ReportByELChanged(elist, totalchanged):
-def CompareSOBs(h1, h2):
- return len(h2.signoffs) - len(h1.signoffs)
+def CompareSOBs(h1):
+ return - len(h1.signoffs)
def ReportBySOBs(hlist):
- hlist.sort(CompareSOBs)
+ hlist = sorted(hlist, key=CompareSOBs)
totalsobs = 0
for h in hlist:
totalsobs += len(h.signoffs)
@@ -176,11 +176,11 @@ def ReportBySOBs(hlist):
#
# Reviewer reporting.
#
-def CompareRevs(h1, h2):
- return len(h2.reviews) - len(h1.reviews)
+def CompareRevs(h1):
+ return - len(h1.reviews)
def ReportByRevs(hlist):
- hlist.sort(CompareRevs)
+ hlist = sorted(hlist, key=CompareRevs)
totalrevs = 0
for h in hlist:
totalrevs += len(h.reviews)
@@ -198,11 +198,11 @@ def ReportByRevs(hlist):
#
# tester reporting.
#
-def CompareTests(h1, h2):
- return len(h2.tested) - len(h1.tested)
+def CompareTests(h1):
+ return - len(h1.tested)
def ReportByTests(hlist):
- hlist.sort(CompareTests)
+ hlist = sorted(hlist, key=CompareTests)
totaltests = 0
for h in hlist:
totaltests += len(h.tested)
@@ -217,11 +217,11 @@ def ReportByTests(hlist):
break
EndReport()
-def CompareTestCred(h1, h2):
- return h2.testcred - h1.testcred
+def CompareTestCred(h1):
+ return - h1.testcred
def ReportByTestCreds(hlist):
- hlist.sort(CompareTestCred)
+ hlist = sorted(hlist, key=CompareTestCred)
totaltests = 0
for h in hlist:
totaltests += h.testcred
@@ -240,11 +240,11 @@ def ReportByTestCreds(hlist):
#
# Reporter reporting.
#
-def CompareReports(h1, h2):
- return len(h2.reports) - len(h1.reports)
+def CompareReports(h1):
+ return - len(h1.reports)
def ReportByReports(hlist):
- hlist.sort(CompareReports)
+ hlist = sorted(hlist, key=CompareReports)
totalreps = 0
for h in hlist:
totalreps += len(h.reports)
@@ -259,11 +259,11 @@ def ReportByReports(hlist):
break
EndReport()
-def CompareRepCred(h1, h2):
- return h2.repcred - h1.repcred
+def CompareRepCred(h1):
+ return - h1.repcred
def ReportByRepCreds(hlist):
- hlist.sort(CompareRepCred)
+ hlist = sorted(hlist, key=CompareRepCred)
totalreps = 0
for h in hlist:
totalreps += h.repcred
@@ -280,13 +280,9 @@ def ReportByRepCreds(hlist):
#
# Versions.
#
-def CompareVersionCounts(h1, h2):
- if h1.versions and h2.versions:
- return len(h2.versions) - len(h1.versions)
- if h2.versions:
- return 1
+def CompareVersionCounts(h1):
if h1.versions:
- return -1
+ return - len(h1.versions)
return 0
def MissedVersions(hv, allv):
@@ -295,7 +291,7 @@ def MissedVersions(hv, allv):
return ' '.join(missed)
def ReportVersions(hlist):
- hlist.sort(CompareVersionCounts)
+ hlist = sorted(hlist, key=CompareVersionCounts)
BeginReport('Developers represented in the most kernel versions')
count = 0
allversions = hlist[0].versions
@@ -307,11 +303,11 @@ def ReportVersions(hlist):
EndReport()
-def CompareESOBs(e1, e2):
- return e2.sobs - e1.sobs
+def CompareESOBs(e1):
+ return - e1.sobs
def ReportByESOBs(elist):
- elist.sort(CompareESOBs)
+ elist = sorted(elist, key=CompareESOBs)
totalsobs = 0
for e in elist:
totalsobs += e.sobs
@@ -325,11 +321,11 @@ def ReportByESOBs(elist):
break
EndReport()
-def CompareHackers(e1, e2):
- return len(e2.hackers) - len(e1.hackers)
+def CompareHackers(e1):
+ return - len(e1.hackers)
def ReportByEHackers(elist):
- elist.sort(CompareHackers)
+ elist = sorted(elist, key=CompareHackers)
totalhackers = 0
for e in elist:
totalhackers += len(e.hackers)
@@ -413,9 +409,9 @@ def ReportByFileType(hacker_list):
total[filetype] = [added, removed, []]
# Print a summary by hacker
- print h.name
+ print(h.name)
for filetype, counters in by_hacker.iteritems():
- print '\t', filetype, counters
+ print('\t', filetype, counters)
h_added = by_hacker[filetype][patch.ADDED]
h_removed = by_hacker[filetype][patch.REMOVED]
total[filetype][2].append([h.name, h_added, h_removed])
@@ -423,14 +419,14 @@ def ReportByFileType(hacker_list):
# Print the global summary
BeginReport('Contributions by type and developers')
for filetype, (added, removed, hackers) in total.iteritems():
- print filetype, added, removed
+ print(filetype, added, removed)
for h, h_added, h_removed in hackers:
- print '\t%s: [%d, %d]' % (h, h_added, h_removed)
+ print('\t%s: [%d, %d]' % (h, h_added, h_removed))
# Print the very global summary
BeginReport('General contributions by type')
for filetype, (added, removed, hackers) in total.iteritems():
- print filetype, added, removed
+ print(filetype, added, removed)
#
# The file access report is a special beast.
diff --git a/stablefixes b/stablefixes
index 6325597..06a36f4 100755
--- a/stablefixes
+++ b/stablefixes
@@ -1,4 +1,4 @@
-#!/usr/bin/pypy
+#!/usr/bin/python3
# -*- python -*-
#
# Read through a set of patches and see how many of them are fixes
@@ -100,8 +100,8 @@ while patch:
FindRefs(patch)
patch = gitlog.grabpatch(sys.stdin)
-print 'Found %d patches, %d fixes' % (Npatches, len(Fixes))
-print '%d had no upstream reference' % (len(UpstreamMissing))
+print('Found %d patches, %d fixes' % (Npatches, len(Fixes)))
+print('%d had no upstream reference' % (len(UpstreamMissing)))
#
# Now see how many fixes have been seen before.
@@ -128,4 +128,4 @@ for id in Ids:
nfound += 1
out.write('</table>')
out.close()
-print 'Found %d refixes' % (nfound)
+print('Found %d refixes' % (nfound))
diff --git a/treeplot b/treeplot
index 688c150..910260a 100755
--- a/treeplot
+++ b/treeplot
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
#
# git log --pretty="%H %P" | this program
# See option descriptions at bottom
@@ -13,7 +13,7 @@
# "git clone --reference" is a relatively easy way to come up with such
# a tree without redownloading the whole mess.
#
-import sys, subprocess, argparse, pickle
+import io, sys, subprocess, argparse, pickle
import graphviz
import patterns
@@ -92,7 +92,8 @@ class Merge:
def ingest_commits(src):
count = 0
expected = 'nothing yet'
- for line in src.readlines():
+ input_stream=io.TextIOWrapper(src.buffer, encoding='utf-8', errors='ignore')
+ for line in input_stream.readlines():
sline = line[:-1].split()
commit = sline[0]
is_merge = (len(sline) > 2)
@@ -222,9 +223,9 @@ def dumptree(start, indent = ''):
int = ''
if start.internal:
int = 'I: '
- print '%s%s%s: %d/%d %s' % (indent, int, start.id[:10],
+ print('%s%s%s: %d/%d %s' % (indent, int, start.id[:10],
len(start.merges), len(start.commits),
- start.tree)
+ start.tree))
for merge in start.merges:
dumptree(merge, indent + ' ')
@@ -237,12 +238,12 @@ def dumpflow(tree, indent = '', seen = []):
srctrees.sort(lambda t1, t2: srcs[t2] - srcs[t1])
for src in srctrees:
if src in seen:
- print 'Skip', src, srcs[src], seen
+ print('Skip', src, srcs[src], seen)
else:
if src in SignedTrees:
- print '%s%4d ** %s' % (indent, srcs[src], src)
+ print('%s%4d ** %s' % (indent, srcs[src], src))
else:
- print '%s%4d %s' % (indent, srcs[src], src)
+ print('%s%4d %s' % (indent, srcs[src], src))
dumpflow(src, indent = indent + ' ', seen = seen + [tree])
def SigStats(tree):
@@ -255,9 +256,9 @@ def SigStats(tree):
else:
upulls += 1
ucommits += srcs[src]
- print '%d repos total, %d signed, %d unsigned' % (spulls + upulls,
- spulls, upulls)
- print ' %d commits from signed, %d from unsigned' % (scommits, ucommits)
+ print('%d repos total, %d signed, %d unsigned' % (spulls + upulls,
+ spulls, upulls))
+ print(' %d commits from signed, %d from unsigned' % (scommits, ucommits))
#
# Graphviz.
@@ -379,11 +380,11 @@ else:
zorch_internals(Mainline)
#dumptree(Mainline)
Treecounts['mainline'] = { 'Applied by Linus': len(Mainline.commits) }
-print 'total commits', count_commits(Mainline)
+print('total commits', count_commits(Mainline))
tree_stats(Mainline)
if args.trim:
trim_trees(args.trim)
-print 'Tree flow'
+print('Tree flow')
dumpflow('mainline')
if args.gvoutput:
GV_out(args.gvoutput)
--
2.36.1