Now integration tests execute command sudo -u hadoop bash -lc "hadoop dfsadmin -report | grep "Datanodes available:.*" | awk '{print $3}'" to check how many data nodes are active.
This command does not work with any Vanilla2 images created with elements from sahara-image-elements repo.
The trace of the failing tests is:
======================================================================
FAIL: tests.gating.test_vanilla_two_gating.VanillaTwoGatingTest.test_vanilla_two_plugin_gating[vanilla2]
tags: worker-0
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/skolekonov/sahara/sahara/tests/integration/tests/gating/test_vanilla_two_gating.py", line 227, in test_vanilla_two_plugin_gating
self._create_cluster()
File "sahara/tests/integration/tests/base.py", line 46, in wrapper
ITestCase.print_error_log(message, e)
File "sahara/openstack/common/excutils.py", line 68, in __exit__
six.reraise(self.type_, self.value, self.tb)
File "sahara/tests/integration/tests/base.py", line 43, in wrapper
fct(*args, **kwargs)
File "/home/skolekonov/sahara/sahara/tests/integration/tests/gating/test_vanilla_two_gating.py", line 157, in _create_cluster
self.vanilla_two_config)
File "sahara/tests/integration/tests/base.py", line 338, in await_active_workers_for_namenode
% self.common_config.HDFS_INITIALIZATION_TIMEOUT
File "/home/skolekonov/sahara/.tox/integration/local/lib/python2.7/site-packages/unittest2/case.py", line 415, in fail
raise self.failureException(msg)
AssertionError: Tasktracker or datanode cannot be started within 5 minute(s) for namenode.
A possible solution is to use the following command: sudo -u hadoop bash -lc "hadoop dfsadmin -report" | grep "Datanodes available:.*" | awk '{print $3}'.
Now integration tests execute command sudo -u hadoop bash -lc "hadoop dfsadmin -report | grep "Datanodes available:.*" | awk '{print $3}'" to check how many data nodes are active.
This command does not work with any Vanilla2 images created with elements from the sahara-image-elements repo.
The trace of the failing tests is:
======================================================================
FAIL: tests.gating.test_vanilla_two_gating.VanillaTwoGatingTest.test_vanilla_two_plugin_gating[vanilla2]
tags: worker-0
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/skolekonov/sahara/sahara/tests/integration/tests/gating/test_vanilla_two_gating.py", line 227, in test_vanilla_two_plugin_gating
self._create_cluster()
File "sahara/tests/integration/tests/base.py", line 46, in wrapper
ITestCase.print_error_log(message, e)
File "sahara/openstack/common/excutils.py", line 68, in __exit__
six.reraise(self.type_, self.value, self.tb)
File "sahara/tests/integration/tests/base.py", line 43, in wrapper
fct(*args, **kwargs)
File "/home/skolekonov/sahara/sahara/tests/integration/tests/gating/test_vanilla_two_gating.py", line 157, in _create_cluster
self.vanilla_two_config)
File "sahara/tests/integration/tests/base.py", line 338, in await_active_workers_for_namenode
% self.common_config.HDFS_INITIALIZATION_TIMEOUT
File "/home/skolekonov/sahara/.tox/integration/local/lib/python2.7/site-packages/unittest2/case.py", line 415, in fail
raise self.failureException(msg)
AssertionError: Tasktracker or datanode cannot be started within 5 minute(s) for namenode.
A possible solution is to use the following command: sudo -u hadoop bash -lc "hadoop dfsadmin -report" | grep "Datanodes available:.*" | awk '{print $3}'.