See <
http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/...
------------------------------------------
[...truncated 4395 lines...]
suites/replication/acceptance_test.py::test_modrdn_entry[1] PASSED
suites/replication/acceptance_test.py::test_modrdn_after_pause PASSED
suites/replication/acceptance_test.py::test_modify_stripattrs PASSED
suites/replication/acceptance_test.py::test_new_suffix PASSED
suites/replication/acceptance_test.py::test_many_attrs PASSED
suites/replication/acceptance_test.py::test_double_delete PASSED
suites/replication/acceptance_test.py::test_password_repl_error PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_init PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_clean PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_clean_restart PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_clean_force PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_abort PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_abort_restart PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_abort_certify PASSED
suites/replication/cleanallruv_test.py::test_cleanallruv_stress_clean PASSED
suites/replication/single_master_test.py::test_mail_attr_repl PASSED
suites/replication/single_master_test.py::test_lastupdate_attr_before_init PASSED
suites/replication/tombstone_test.py::test_purge_success PASSED
suites/replication/wait_for_async_feature_test.py::test_not_int_value PASSED
suites/replication/wait_for_async_feature_test.py::test_multi_value PASSED
suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr0]
PASSED
suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr1]
PASSED
suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr2]
PASSED
suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr3]
PASSED
suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr0]
PASSED
suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr1]
PASSED
suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr2]
PASSED
suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr3]
PASSED
suites/schema/test_schema.py::test_schema_comparewithfiles PASSED
suites/setup_ds/setup_ds_test.py::test_slapd_InstScriptsEnabled[true] PASSED
suites/setup_ds/setup_ds_test.py::test_slapd_InstScriptsEnabled[false] PASSED
=================================== FAILURES ===================================
_______________________________ test_ticket47931 _______________________________
topology_st = <lib389.topologies.TopologyMain object at 0x7f7acc2f6190>
def test_ticket47931(topology_st):
"""Test Retro Changelog and MemberOf deadlock fix.
Verification steps:
- Enable retro cl and memberOf.
- Create two backends: A & B.
- Configure retrocl scoping for backend A.
- Configure memberOf plugin for uniquemember
- Create group in backend A.
- In parallel, add members to the group on A, and make modifications
to entries in backend B.
- Make sure the server does not hang during the updates to both
backends.
"""
# Enable dynamic plugins to make plugin configuration easier
try:
topology_st.standalone.modify_s(DN_CONFIG,
[(ldap.MOD_REPLACE,
'\''nsslapd-dynamic-plugins'\'',
'\''on'\'')])
except ldap.LDAPError as e:
log.error('\''Failed to enable dynamic plugins! '\'' +
e.message['\''desc'\''])
assert False
# Enable the plugins
topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
topology_st.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG)
# Create second backend
topology_st.standalone.backend.create(SECOND_SUFFIX, {BACKEND_NAME:
SECOND_BACKEND})
topology_st.standalone.mappingtree.create(SECOND_SUFFIX, bename=SECOND_BACKEND)
# Create the root node of the second backend
try:
topology_st.standalone.add_s(Entry((SECOND_SUFFIX,
{'\''objectclass'\'': '\''top
domain'\''.split(),
'\''dc'\'':
'\''deadlock'\''})))
except ldap.LDAPError as e:
log.fatal('\''Failed to create suffix entry: error
'\'' + e.message['\''desc'\''])
assert False
# Configure retrocl scope
try:
topology_st.standalone.modify_s(RETROCL_PLUGIN_DN,
[(ldap.MOD_REPLACE,
'\''nsslapd-include-suffix'\'',
DEFAULT_SUFFIX)])
except ldap.LDAPError as e:
log.error('\''Failed to configure retrocl plugin: '\''
+ e.message['\''desc'\''])
assert False
# Configure memberOf group attribute
try:
topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN,
[(ldap.MOD_REPLACE,
'\''memberofgroupattr'\'',
'\''uniquemember'\'')])
except ldap.LDAPError as e:
log.fatal('\''Failed to configure memberOf plugin: error
'\'' + e.message['\''desc'\''])
assert False
# Create group
try:
topology_st.standalone.add_s(Entry((GROUP_DN,
{'\''objectclass'\'': '\''top
extensibleObject'\''.split(),
'\''cn'\'':
'\''group'\''})))
except ldap.LDAPError as e:
log.fatal('\''Failed to add grouo: error '\'' +
e.message['\''desc'\''])
assert False
# Create 1500 entries (future members of the group)
for idx in range(1, 1500):
try:
USER_DN = ("uid=member%d,%s" % (idx, DEFAULT_SUFFIX))
topology_st.standalone.add_s(Entry((USER_DN,
{'\''objectclass'\'': '\''top
extensibleObject'\''.split(),
'\''uid'\'':
'\''member%d'\'' % (x)})))
except ldap.LDAPError as e:
log.fatal('\''Failed to add user (%s): error
%s'\'' % (USER_DN, e.message['\''desc'\'']))
assert False
# Modify second backend (separate thread)
mod_backend_thrd = modifySecondBackendThread(topology_st.standalone, TIME_OUT)
mod_backend_thrd.start()
# Add members to the group - set timeout
log.info('\''Adding members to the group...'\'')
topology_st.standalone.set_option(ldap.OPT_TIMEOUT, TIME_OUT)
for idx in range(1, 1500):
try:
MEMBER_VAL = ("uid=member%d,%s" % (idx, DEFAULT_SUFFIX))
topology_st.standalone.modify_s(GROUP_DN,
[(ldap.MOD_ADD,
'\''uniquemember'\'',
MEMBER_VAL)])
except ldap.TIMEOUT:
log.fatal('\''Deadlock! Bug verification
failed.'\'')
assert False
E assert False
tickets/ticket47931_test.py:148: AssertionError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
INFO:lib389:List backend with suffix=dc=deadlock
INFO:lib389:Creating a local backend
INFO:lib389:List backend cn=deadlock,cn=ldbm database,cn=plugins,cn=config
INFO:lib389:Found entry dn: cn=deadlock,cn=ldbm database,cn=plugins,cn=config
cn: deadlock
nsslapd-cachememsize: 512000
nsslapd-cachesize: -1
nsslapd-directory: /var/lib/dirsrv/slapd-standalone_1/db/deadlock
nsslapd-dncachememsize: 16777216
nsslapd-readonly: off
nsslapd-require-index: off
nsslapd-suffix: dc=deadlock
objectClass: top
objectClass: extensibleObject
objectClass: nsBackendInstance
INFO:lib389:Entry dn: cn="dc=deadlock",cn=mapping tree,cn=config
cn: dc=deadlock
nsslapd-backend: deadlock
nsslapd-state: backend
objectclass: top
objectclass: extensibleObject
objectclass: nsMappingTree
INFO:lib389:Found entry dn: cn=dc\3Ddeadlock,cn=mapping tree,cn=config
cn: dc=deadlock
nsslapd-backend: deadlock
nsslapd-state: backend
objectClass: top
objectClass: extensibleObject
objectClass: nsMappingTree
INFO:dirsrvtests.tests.tickets.ticket47931_test:Adding members to the group...
INFO:dirsrvtests.tests.tickets.ticket47931_test:Modify second suffix...
CRITICAL:dirsrvtests.tests.tickets.ticket47931_test:Deadlock! Bug verification failed.
_______________________________ test_ticket49039 _______________________________
topo = <lib389.topologies.TopologyMain object at 0x7f7ac4b25ed0>
def test_ticket49039(topo):
"""Test "password must change" verses "password min
age". Min age should not
block password update if the password was reset.
"""
# Setup SSL (for ldappasswd test)
ssl_init(topo)
<
http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/...:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
<
http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/...:
in ssl_init
topo.standalone.restart()
<
http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/...:
in restart
self.start(timeout, post_open)
<
http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/...:
in start
"dirsrv@%s" % self.serverid])
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
popenargs = (['\''/usr/bin/systemctl'\'',
'\''start'\'',
'\''dirsrv@standalone_1'\''],)
kwargs = {}, retcode = 1
cmd = ['\''/usr/bin/systemctl'\'',
'\''start'\'',
'\''dirsrv@standalone_1'\'']
def check_call(*popenargs, **kwargs):
"""Run command with arguments. Wait for command to complete. If
the exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
check_call(["ls", "-l"])
"""
retcode = call(*popenargs, **kwargs)
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd)
E
CalledProcessError: Command
'\''['\''/usr/bin/systemctl'\'',
'\''start'\'',
'\''dirsrv@standalone_1'\'']'\'' returned non-zero
exit status 1
/usr/lib64/python2.7/subprocess.py:186: CalledProcessError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
Generating key. This may take a few moments...
Generating key. This may take a few moments...
Job for dirsrv@standalone_1.service failed because the control process exited with error
code. See "systemctl status dirsrv@standalone_1.service" and "journalctl
-xe" for details.
=================== 2 failed, 507 passed in 10864.16 seconds ==================='
+ '[' 1 -ne 0 ']'
+ echo CI Tests 'FAILED!'
CI Tests FAILED!
+ MSG=FAILED
+ RC=1
+ sudo /usr/sbin/sendmail mreynolds@redhat.com firstyear@redhat.com
+ sudo rm -rf /var/tmp/slapd.vg.25523 /var/tmp/slapd.vg.25615 /var/tmp/slapd.vg.34157
/var/tmp/slapd.vg.3666
+ exit 1
Build step 'Execute shell' marked build as failure