2016-05-23 19:50:08 +02:00
|
|
|
#!/usr/bin/env python
|
|
|
|
from unittest import *
|
|
|
|
from console.include.common_functions_60 import *
|
|
|
|
from console.include.common_classes_60 import *
|
2016-05-27 18:26:47 +02:00
|
|
|
from sauceclient import SauceClient
|
2016-08-19 13:17:47 +02:00
|
|
|
import testtools
|
2016-07-12 09:35:02 +02:00
|
|
|
from os import environ, getenv
|
2016-05-27 18:26:47 +02:00
|
|
|
import subprocess, time, sys
|
2016-05-23 19:50:08 +02:00
|
|
|
|
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
class ArticaTestSuite(TestSuite):
|
|
|
|
def __init__(self,*args,**kwargs):
|
|
|
|
super(ArticaTestSuite,self).__init__(*args,**kwargs)
|
|
|
|
|
|
|
|
def run(self,*args,**kwargs):
|
2016-08-19 15:48:02 +02:00
|
|
|
#print "Running "+str(self.countTestCases())+" tests."
|
|
|
|
#print "Tests are: "+str(self._tests)
|
2016-08-19 13:17:47 +02:00
|
|
|
super(ArticaTestSuite,self).run(*args,**kwargs)
|
2016-07-19 17:23:04 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
class ArticaTestLoader(TestLoader):
    """TestLoader that builds ArticaTestSuite instances instead of TestSuite."""

    def __init__(self, *args, **kwargs):
        # Swap in our suite class before the base loader initialises itself,
        # so every suite produced by discover()/loadTestsFrom*() is an
        # ArticaTestSuite.
        self.suiteClass = ArticaTestSuite
        super(ArticaTestLoader, self).__init__(*args, **kwargs)
2016-07-19 17:23:04 +02:00
|
|
|
|
2016-08-19 16:56:48 +02:00
|
|
|
def split_suite_into_chunks(n, suite):
    """Split *suite* into at most *n* chunks of roughly equal size.

    Returns a list of ``[ArticaTestSuite, route_code]`` pairs (route_code is
    always None), the shape expected by
    ``testtools.ConcurrentStreamTestSuite``.
    """
    # Keep n to a reasonable number of threads (at least 1, at most 8).
    # BUG FIX: the original guard was `n < 0`, which let n == 0 through and
    # crashed with ZeroDivisionError in the division below.
    if n < 1:
        n = 1
    if n > 8:
        n = 8

    # Chunk size = ceil(total / n) so the number of chunks never exceeds n.
    # BUG FIX: the original used math.ceil(total / n), but under Python 2 the
    # integer division floors *before* ceil runs, so the chunk size came out
    # too small and up to n+1 chunks (threads) could be produced.  Use exact
    # integer ceiling division instead.
    total = suite.countTestCases()
    chunk_size = -(-total // n)

    chunks = []
    i = 0
    current = ArticaTestSuite()
    for case in suite:
        if i < chunk_size:
            current.addTest(case)
            i += 1
        if i == chunk_size:
            # Current chunk is full: emit it and start a fresh one.
            chunks.append([current, None])
            i = 0
            current = ArticaTestSuite()
    if i > 0:
        # Emit the final, partially-filled chunk.
        chunks.append([current, None])
    return chunks
|
2016-05-27 18:26:47 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
class TracingStreamResult(testtools.StreamResult):
|
|
|
|
failures = []
|
|
|
|
success = []
|
|
|
|
skipped = []
|
|
|
|
errors = []
|
2016-08-10 12:15:47 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
def status(self, test_status, test_id, *args, **kwargs):
|
|
|
|
if test_status=='inprogress':
|
|
|
|
print "Running test "+str(test_id)
|
2016-05-23 19:50:08 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
elif test_status=='xfail' or test_status=='fail' or test_status=='exists':
|
|
|
|
print "Test "+str(test_id)+" has failed"
|
|
|
|
self.failures.append(test_id)
|
2016-08-10 12:15:47 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
elif test_status=='uxsuccess' or test_status=='success':
|
|
|
|
print "Test "+str(test_id)+" has succeeded"
|
|
|
|
self.success.append(test_id)
|
2016-08-10 12:15:47 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
elif test_status=='exists':
|
|
|
|
print "Test "+str(test_id)+" has failed (already existed)"
|
|
|
|
self.errors.append(test_id)
|
|
|
|
|
|
|
|
elif test_status=='skip':
|
|
|
|
print "Test "+str(test_id)+" was skipped"
|
|
|
|
self.skipped.append('test_id')
|
|
|
|
|
|
|
|
# Decide on parallelism from the environment: ENTERPRISE must be exactly '1'
# for the run to count as enterprise.
is_enterprise = '1' == getenv('ENTERPRISE', False)

# Enterprise runs use 2 worker threads, everything else uses 3.
num_threads = 2 if is_enterprise else 3
|
2016-08-19 16:21:14 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
a = ArticaTestLoader()
|
2016-08-10 12:15:47 +02:00
|
|
|
|
2016-08-19 13:17:47 +02:00
|
|
|
#Network server tests
|
|
|
|
tests = a.discover(start_dir='console',pattern='*.py')
|
|
|
|
|
|
|
|
print str(tests.countTestCases())+" tests found"
|
2016-08-19 16:04:28 +02:00
|
|
|
print "Using "+str(num_threads)+" threads"
|
2016-08-19 13:17:47 +02:00
|
|
|
|
|
|
|
concurrent_suite = testtools.ConcurrentStreamTestSuite(lambda: (split_suite_into_chunks(num_threads, tests)))
|
|
|
|
result = TracingStreamResult()
|
|
|
|
|
|
|
|
try:
|
|
|
|
result.startTestRun()
|
|
|
|
finally:
|
|
|
|
concurrent_suite.run(result)
|
|
|
|
|
|
|
|
print "SUMMARY"
|
|
|
|
print "======="
|
|
|
|
print "Tests failed: %s" % result.failures
|
|
|
|
print "Tests succeeded: %s" % result.success
|
|
|
|
print "Tests skipped: %s" % result.skipped
|
|
|
|
print "Tests with errors: %s" % result.errors
|
|
|
|
print "======="
|
|
|
|
|
|
|
|
if (len(result.failures)+len(result.errors)) != 0:
|
|
|
|
sys.exit(1)
|
2016-05-27 21:34:36 +02:00
|
|
|
else:
|
|
|
|
sys.exit(0)
|