Signed-off-by: Petr Vorel <[email protected]>
---
nfs4.0/testserver.py | 7 ++++++-
nfs4.1/testmod.py | 38 ++++++++++++++++++++++++++++++++++++++
nfs4.1/testserver.py | 6 +++++-
3 files changed, 49 insertions(+), 2 deletions(-)
diff --git a/nfs4.0/testserver.py b/nfs4.0/testserver.py
index 0ef010a..f2c4156 100755
--- a/nfs4.0/testserver.py
+++ b/nfs4.0/testserver.py
@@ -85,6 +85,8 @@ def scan_options(p):
help="Skip final cleanup of test directory")
p.add_option("--outfile", "--out", default=None, metavar="FILE",
help="Store test results in FILE [%default]")
+ p.add_option("--jsonout", "--json", default=None, metavar="FILE",
+ help="Store test results in JSON format [%default]")
p.add_option("--xmlout", "--xml", default=None, metavar="FILE",
help="Store test results in xml format [%default]")
p.add_option("--debug_fail", action="store_true", default=False,
@@ -378,8 +380,11 @@ def main():
if fail:
print("\nWARNING: could not clean testdir due to:\n%s\n" % err)
- if opt.xmlout is not None:
+ if opt.jsonout is not None:
+ testmod.json_printresults(tests, opt.jsonout)
+ elif opt.xmlout is not None:
testmod.xml_printresults(tests, opt.xmlout)
+
if nfail < 0:
sys.exit(3)
if nfail > 0:
diff --git a/nfs4.1/testmod.py b/nfs4.1/testmod.py
index e368853..4b4ed24 100644
--- a/nfs4.1/testmod.py
+++ b/nfs4.1/testmod.py
@@ -13,6 +13,7 @@ import re
import sys
import time
from traceback import format_exception, print_exc
+import json
import xml.dom.minidom
import datetime
@@ -467,6 +468,43 @@ def printresults(tests, opts, file=None):
(count[SKIP], count[FAIL], count[WARN], count[PASS]), file=file)
return count[FAIL]
+def json_printresults(tests, file_name, suite='all'):
+ with open(file_name, 'w') as fd:
+ failures = 0
+ skipped = 0
+ total_time = 0
+ data = {}
+ data["tests"] = len(tests)
+ data["errors"] = 0
+ data["timestamp"] = str(datetime.datetime.now())
+ data["name"] = suite
+ data["testcase"] = []
+ for t in tests:
+ test = {
+ "name": t.name,
+ "classname": t.suite,
+ "time": str(t.time_taken),
+ }
+
+ total_time += t.time_taken
+ if t.result == TEST_FAIL:
+ failures += 1
+ test["failure"] = {
+ "message" : t.result.msg,
+ "err" : ''.join(t.result.tb)
+ }
+ elif t.result == TEST_OMIT:
+ skipped += 1
+ test["skipped"] = 1
+
+ data["testcase"].append(test)
+
+ data["failures"] = failures
+ data["skipped"] = skipped
+ data["time"] = total_time
+
+ fd.write(json.dumps(data, indent=4, sort_keys=True))
+
def xml_printresults(tests, file_name, suite='all'):
with open(file_name, 'w') as fd:
failures = 0
diff --git a/nfs4.1/testserver.py b/nfs4.1/testserver.py
index d3c44c7..085f007 100755
--- a/nfs4.1/testserver.py
+++ b/nfs4.1/testserver.py
@@ -68,6 +68,8 @@ def scan_options(p):
help="Skip final cleanup of test directory")
p.add_option("--outfile", "--out", default=None, metavar="FILE",
help="Store test results in FILE [%default]")
+ p.add_option("--jsonout", "--json", default=None, metavar="FILE",
+ help="Store test results in JSON format [%default]")
p.add_option("--xmlout", "--xml", default=None, metavar="FILE",
help="Store test results in xml format [%default]")
p.add_option("--debug_fail", action="store_true", default=False,
@@ -363,7 +365,9 @@ def main():
if fail:
print("\nWARNING: could not clean testdir due to:\n%s\n" % err)
- if opt.xmlout is not None:
+ if opt.jsonout is not None:
+ testmod.json_printresults(tests, opt.jsonout)
+ elif opt.xmlout is not None:
testmod.xml_printresults(tests, opt.xmlout)
if __name__ == "__main__":
--
2.31.1
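
For illustration, a report written via the new --json option would look
roughly like the following; the key set and the sorted ordering follow
json_printresults() above (json.dumps with indent=4, sort_keys=True), while
the test names and values here are made up:

    {
        "errors": 0,
        "failures": 1,
        "name": "all",
        "skipped": 0,
        "testcase": [
            {
                "classname": "sequence",
                "failure": {
                    "err": "(traceback text)",
                    "message": "(failure message)"
                },
                "name": "SEQ1",
                "time": "0.012"
            }
        ],
        "tests": 1,
        "time": 0.012,
        "timestamp": "2021-05-24 16:42:50.123456"
    }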
I've got nothing against this, but I'm curious why you need it.
--b.
On Mon, May 24, 2021 at 04:42:50PM +0200, Petr Vorel wrote:
> [full patch snipped]
Hi Bruce,
> I've got nothing against this, but I'm curious why you need it.
Well, I can integrate it via XML, but JSON is just a bit more readable.
So it's up to you.
Having XML output (and JSON output, if accepted) would be nice.
I prefer to have a well-defined format instead of parsing showresults.py output
(originally I thought I'd add JSON to showresults.py, but then I noticed
--xml in testserver.py).
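E.g. on the consumer side something as small as this would be enough (the
file name and the summary printed are only an example):

    import json

    # results.json is whatever file was passed to --json / --jsonout
    with open("results.json") as fd:
        report = json.load(fd)

    print("%(tests)d tests, %(failures)d failures, %(skipped)d skipped" % report)

    # list the failed test cases with their messages
    for tc in report["testcase"]:
        if "failure" in tc:
            print("%s (%s): %s" % (tc["name"], tc["classname"],
                                   tc["failure"]["message"]))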
Kind regards,
Petr