+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/08/03 02:17:48 Waiting for host: 192.168.66.102:22
2018/08/03 02:17:51 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/03 02:17:59 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/03 02:18:07 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/03 02:18:15 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/03 02:18:20 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987
e183c40c07dc
861f604efed4
12902ad26342
028539b1f68b
bd6f07c1906c
d1f95a33a226
c43f96b6da26
e007e5cfd226
b42e2bceca6e
00531aec6f9a
e4ad39ba6cef
504c3df6bbf4
eb1ec0b445ce
b8955b91e8e5
f739ed8f3e59
07668d85ab3a
a6045d125d7b
2ce17110e009
b45f64ab28ef
3a15945be9e1
2a0af99ae1d1
0ece927846d7
0202d5f5dfae
8ce743769d8f
2efb36567bd8
96b65c0493c5
e9ce89fa30e3
2018/08/03 02:18:26 Waiting for host: 192.168.66.101:22
2018/08/03 02:18:29 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/03 02:18:37 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/03 02:18:45 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/03 02:18:50 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused.
Sleeping 5s 2018/08/03 02:18:55 Connected to tcp://192.168.66.101:22 + inventory_file=/root/inventory + openshift_ansible=/root/openshift-ansible + echo '[new_nodes]' + sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory + nodes_found=false ++ seq 2 100 + for i in '$(seq 2 100)' ++ printf node%02d 2 + node=node02 ++ printf 192.168.66.1%02d 2 + node_ip=192.168.66.102 + set +e + ping 192.168.66.102 -c 1 PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data. 64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=0.691 ms --- 192.168.66.102 ping statistics --- 1 packets transmitted, 1 received, 0% packet loss, time 0ms rtt min/avg/max/mdev = 0.691/0.691/0.691/0.000 ms + '[' 0 -ne 0 ']' Found node02. Adding it to the inventory. + nodes_found=true + set -e + echo '192.168.66.102 node02' + echo 'Found node02. Adding it to the inventory.' + echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102' + for i in '$(seq 2 100)' ++ printf node%02d 3 + node=node03 ++ printf 192.168.66.1%02d 3 + node_ip=192.168.66.103 + set +e + ping 192.168.66.103 -c 1 PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data. From 192.168.66.101 icmp_seq=1 Destination Host Unreachable --- 192.168.66.103 ping statistics --- 1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms + '[' 1 -ne 0 ']' + break + '[' true = true ']' + ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] 
****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) PLAY [Ensure there are new_nodes] ********************************************** TASK [fail] ******************************************************************** skipping: [localhost] TASK [fail] ******************************************************************** skipping: [localhost] PLAY [Initialization Checkpoint Start] ***************************************** TASK [Set install initialization 'In Progress'] ******************************** ok: [node02] PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] ****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** 
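
The shell trace earlier in the log shows how the provisioning script discovers freshly booted nodes and feeds them to the openshift-ansible scale-up play: it registers a new_nodes group in the inventory, probes node02, node03, ... by IP until one stops answering, and then runs playbooks/openshift-node/scaleup.yml. A minimal sketch of that loop follows; the redirect targets (/etc/hosts and the inventory file) are assumptions, since `set -x` does not print redirections, so the exact files the `echo` lines write to are not visible in the trace.

#!/bin/bash
# Hedged reconstruction of the node-discovery loop traced above (not the verbatim script).
inventory_file=/root/inventory
openshift_ansible=/root/openshift-ansible

# Register the new_nodes group under [OSEv3:children].
echo '[new_nodes]' >> "$inventory_file"                       # assumed redirect
sed -i '/\[OSEv3:children\]/a new_nodes' "$inventory_file"

nodes_found=false
for i in $(seq 2 100); do
    node=$(printf 'node%02d' "$i")                            # node02, node03, ...
    node_ip=$(printf '192.168.66.1%02d' "$i")                 # 192.168.66.102, ...

    # Probe the candidate node; stop at the first address that does not answer.
    set +e
    ping "$node_ip" -c 1
    if [ $? -ne 0 ]; then
        break
    fi
    set -e

    nodes_found=true
    echo "Found $node. Adding it to the inventory."
    echo "$node_ip $node" >> /etc/hosts                       # assumed redirect
    echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> "$inventory_file"   # assumed redirect
done

# Run the scale-up playbook only if at least one new node answered.
if [ "$nodes_found" = true ]; then
    ansible-playbook -i "$inventory_file" "$openshift_ansible/playbooks/openshift-node/scaleup.yml"
fi
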
TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config PLAY [Ensure that all non-node hosts are accessible] *************************** TASK [Gathering Facts] ********************************************************* ok: [node01] PLAY [Initialize basic host facts] ********************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02 TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] *** ok: [node02] ok: [node01] TASK [openshift_sanitize_inventory : debug] ************************************ skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_stats] ******************************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] *** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02 included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02 TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* ok: [node02] ok: [node01] TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Standardize on latest variable names] ***** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Normalize openshift_release] ************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02 TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* skipping: [node01] => (item=openshift_hosted_etcd_storage_kind) skipping: [node02] => (item=openshift_hosted_etcd_storage_kind) TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK 
[openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : At least one master is schedulable] ******* skipping: [node01] skipping: [node02] TASK [Detecting Operating System from ostree_booted] *************************** ok: [node02] ok: [node01] TASK [set openshift_deployment_type if unset] ********************************** skipping: [node01] skipping: [node02] TASK [check for node already bootstrapped] ************************************* ok: [node02] ok: [node01] TASK [initialize_facts set fact openshift_is_bootstrapped] ********************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] *** ok: [node01] ok: [node02] TASK [Determine Atomic Host Docker Version] ************************************ skipping: [node01] skipping: [node02] TASK [assert atomic host docker version is 1.12 or later] ********************** skipping: [node01] skipping: [node02] PLAY [Retrieve existing master configs and validate] *************************** TASK [openshift_control_plane : stat] ****************************************** ok: [node01] TASK [openshift_control_plane : slurp] ***************************************** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] *** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Initialize special first-master variables] ******************************* TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Disable web console if required] ***************************************** TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Setup yum repositories for all hosts] ************************************ TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Install Red Hat Subscription manager] ******************* skipping: [node02] TASK [rhel_subscribe : Is host already registered?] 
**************************** skipping: [node02] TASK [rhel_subscribe : Register host] ****************************************** skipping: [node02] TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] *********** skipping: [node02] TASK [rhel_subscribe : Attach to OpenShift Pool] ******************************* skipping: [node02] TASK [rhel_subscribe : Satellite preparation] ********************************** skipping: [node02] TASK [openshift_repos : openshift_repos detect ostree] ************************* ok: [node02] TASK [openshift_repos : Ensure libselinux-python is installed] ***************** ok: [node02] TASK [openshift_repos : Remove openshift_additional.repo file] ***************** ok: [node02] TASK [openshift_repos : Create any additional repos that are defined] ********** TASK [openshift_repos : include_tasks] ***************************************** skipping: [node02] TASK [openshift_repos : include_tasks] ***************************************** included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02 TASK [openshift_repos : Configure origin gpg keys] ***************************** ok: [node02] TASK [openshift_repos : Configure correct origin release repository] *********** ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2) TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] *** changed: [node02] => { "msg": "First run of openshift_repos" } TASK [openshift_repos : Record that openshift_repos already ran] *************** ok: [node02] RUNNING HANDLER [openshift_repos : refresh cache] ****************************** changed: [node02] PLAY [Install packages necessary for installer] ******************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Determine if chrony is installed] **************************************** [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message. 
changed: [node02] TASK [Install ntp package] ***************************************************** skipping: [node02] TASK [Start and enable ntpd/chronyd] ******************************************* changed: [node02] TASK [Ensure openshift-ansible installer package deps are installed] *********** ok: [node02] => (item=iproute) ok: [node02] => (item=dbus-python) ok: [node02] => (item=PyYAML) ok: [node02] => (item=python-ipaddress) ok: [node02] => (item=libsemanage-python) ok: [node02] => (item=yum-utils) ok: [node02] => (item=python-docker) PLAY [Initialize cluster facts] ************************************************ TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [get openshift_current_version] ******************************************* ok: [node02] ok: [node01] TASK [set_fact openshift_portal_net if present on masters] ********************* ok: [node02] ok: [node01] TASK [Gather Cluster facts] **************************************************** changed: [node02] changed: [node01] TASK [Set fact of no_proxy_internal_hostnames] ********************************* skipping: [node01] skipping: [node02] TASK [Initialize openshift.node.sdn_mtu] *************************************** changed: [node02] ok: [node01] PLAY [Initialize etcd host variables] ****************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role : openshift_version] **************************************** TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** ok: [node01] TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** ok: [node01] => { "msg": "openshift_pkg_version was not defined. 
Falling back to -3.10.0" } TASK [openshift_version : set_fact] ******************************************** ok: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Verify Requirements] ***************************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [Run variable sanity checks] ********************************************** ok: [node01] TASK [Validate openshift_node_groups and openshift_node_group_name] ************ ok: [node01] PLAY [Initialization Checkpoint End] ******************************************* TASK [Set install initialization 'Complete'] *********************************** ok: [node02] PLAY [Validate node hostnames] ************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Query DNS for IP address of node02] ************************************** ok: [node02] TASK [Validate openshift_hostname when defined] ******************************** skipping: [node02] TASK [Validate openshift_ip exists on node when defined] *********************** skipping: [node02] PLAY [Configure os_firewall] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [os_firewall : Detecting Atomic Host Operating System] ******************** ok: [node02] TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** ok: [node02] TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** skipping: [node02] TASK [os_firewall : Install firewalld packages] ******************************** skipping: [node02] TASK [os_firewall : Ensure iptables services are not enabled] ****************** skipping: [node02] => (item=iptables) skipping: [node02] => (item=ip6tables) TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** skipping: [node02] TASK [os_firewall : Start and enable firewalld service] ************************ skipping: [node02] TASK 
[os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** skipping: [node02] TASK [os_firewall : Restart polkitd] ******************************************* skipping: [node02] TASK [os_firewall : Wait for polkit action to have been created] *************** skipping: [node02] TASK [os_firewall : Ensure firewalld service is not enabled] ******************* ok: [node02] TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** skipping: [node02] TASK [os_firewall : Install iptables packages] ********************************* ok: [node02] => (item=iptables) ok: [node02] => (item=iptables-services) TASK [os_firewall : Start and enable iptables service] ************************* ok: [node02 -> node02] => (item=node02) TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** skipping: [node02] PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [container_runtime : Setup the docker-storage for overlay] **************** skipping: [node02] TASK [container_runtime : Create file system on extra volume device] *********** TASK [container_runtime : Create mount entry for extra volume] ***************** PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** ok: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* ok: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ****************** skipping: [node02] TASK [container_runtime : Add https_proxy to /etc/atomic.conf] ***************** skipping: [node02] TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ******************** skipping: [node02] TASK [container_runtime : Get current installed Docker version] **************** ok: [node02] TASK [container_runtime : Error out if Docker pre-installed but too old] ******* skipping: [node02] TASK [container_runtime : Error out if requested Docker is too old] ************ skipping: [node02] TASK [container_runtime : Install Docker] ************************************** skipping: [node02] TASK [container_runtime : Ensure 
docker.service.d directory exists] ************ ok: [node02] TASK [container_runtime : Configure Docker service unit file] ****************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Set registry params] ********************************* skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** skipping: [node02] TASK [container_runtime : Set Proxy Settings] ********************************** skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) TASK [container_runtime : Set various Docker options] ************************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Configure Docker Network OPTIONS] ******************** ok: [node02] TASK [container_runtime : Detect if docker is already started] ***************** ok: [node02] TASK [container_runtime : Start the Docker service] **************************** ok: [node02] TASK [container_runtime : set_fact] ******************************************** ok: [node02] TASK [container_runtime : Check for docker_storage_path/overlay2] ************** ok: [node02] TASK [container_runtime : Fixup SELinux permissions for docker] **************** changed: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* ok: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** ok: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** ok: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] TASK [container_runtime : Fail if Atomic Host since this is an rpm request] **** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create 
systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Check that overlay is in the kernel] ***************** skipping: [node02] TASK [container_runtime : Add overlay to modprobe.d] *************************** skipping: [node02] TASK [container_runtime : Manually modprobe overlay into the kernel] *********** skipping: [node02] TASK [container_runtime : Enable and start systemd-modules-load] *************** skipping: [node02] TASK [container_runtime : Install cri-o] *************************************** skipping: [node02] TASK [container_runtime : Remove CRI-O default configuration files] ************ skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf) skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf) TASK [container_runtime : Create the CRI-O configuration] ********************** skipping: [node02] TASK [container_runtime : Ensure CNI configuration directory exists] *********** skipping: [node02] TASK [container_runtime : Add iptables allow rules] **************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove iptables rules] ******************************* TASK [container_runtime : Add firewalld allow rules] *************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove firewalld allow rules] ************************ TASK [container_runtime : Configure the CNI network] *************************** skipping: [node02] TASK [container_runtime : Create /etc/sysconfig/crio-network] ****************** skipping: [node02] TASK [container_runtime : Start the CRI-O service] ***************************** skipping: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* skipping: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** skipping: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** skipping: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role : openshift_version] **************************************** TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** skipping: [node01] 
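
The openshift_version tasks (run once above for the first master and repeated here) pin the deployment to openshift_release 3.10, openshift_image_tag v3.10.0-rc.0 and openshift_pkg_version -3.10.0*, and assert that the release string is contained in both the image tag and the package version. A rough shell re-expression of that consistency check is below; the variable values are copied from the debug output, and the script itself is illustrative rather than part of the job.

#!/bin/bash
# Sketch of the "assert openshift_release in openshift_image_tag / openshift_pkg_version" checks.
openshift_release="3.10"
openshift_image_tag="v3.10.0-rc.0"
openshift_pkg_version="-3.10.0*"

case "$openshift_image_tag" in
    *"$openshift_release"*) echo "image tag $openshift_image_tag matches release $openshift_release" ;;
    *) echo "image tag $openshift_image_tag does not contain release $openshift_release" >&2; exit 1 ;;
esac

case "$openshift_pkg_version" in
    *"$openshift_release"*) echo "pkg version $openshift_pkg_version matches release $openshift_release" ;;
    *) echo "pkg version $openshift_pkg_version does not contain release $openshift_release" >&2; exit 1 ;;
esac
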
TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Node Preparation Checkpoint Start] *************************************** TASK [Set Node preparation 'In Progress'] ************************************** ok: [node02] PLAY [Only target nodes that have not yet been bootstrapped] ******************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [add_host] **************************************************************** skipping: [localhost] => (item=node02) ok: [localhost] => (item=node01) PLAY [Disable excluders] ******************************************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02 TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-docker-excluder version detected] 
************ skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-excluder version detected] ******************* skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** skipping: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] PLAY [Configure nodes] ********************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_cloud_provider : Set cloud provider facts] ********************* skipping: [node02] TASK [openshift_cloud_provider : Create cloudprovider config dir] ************** skipping: [node02] TASK [openshift_cloud_provider : include the defined cloud provider files] ***** skipping: [node02] TASK [openshift_node : fail] *************************************************** skipping: [node02] TASK [openshift_node : Check for NetworkManager service] *********************** ok: [node02] TASK [openshift_node : Set fact using_network_manager] ************************* ok: [node02] TASK [openshift_node : Install dnsmasq] **************************************** ok: [node02] TASK [openshift_node : ensure origin/node directory exists] ******************** changed: [node02] => (item=/etc/origin) 
changed: [node02] => (item=/etc/origin/node) TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] *** skipping: [node02] TASK [openshift_node : Install network manager dispatch script] **************** skipping: [node02] TASK [openshift_node : Install dnsmasq configuration] ************************** ok: [node02] TASK [openshift_node : Deploy additional dnsmasq.conf] ************************* skipping: [node02] TASK [openshift_node : Enable dnsmasq] ***************************************** ok: [node02] TASK [openshift_node : Install network manager dispatch script] **************** ok: [node02] TASK [openshift_node : Add iptables allow rules] ******************************* ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove iptables rules] ********************************** TASK [openshift_node : Add firewalld allow rules] ****************************** skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove firewalld allow rules] *************************** TASK [openshift_node : Checking for journald.conf] ***************************** ok: [node02] TASK [openshift_node : Create journald persistence directories] **************** ok: [node02] TASK [openshift_node : Update journald setup] ********************************** ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'}) ok: [node02] => (item={u'var': u'Compress', u'val': True}) ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000}) ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'}) ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'}) ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'}) ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'}) ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'}) ok: [node02] => (item={u'var': u'ForwardToSyslog', 
u'val': False}) ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False}) TASK [openshift_node : Restart journald] *************************************** skipping: [node02] TASK [openshift_node : Disable swap] ******************************************* ok: [node02] TASK [openshift_node : Install node, clients, and conntrack packages] ********** ok: [node02] => (item={u'name': u'origin-node-3.10.0*'}) ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'}) ok: [node02] => (item={u'name': u'conntrack-tools'}) TASK [openshift_node : Restart cri-o] ****************************************** skipping: [node02] TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] *** changed: [node02] TASK [openshift_node : sysctl] ************************************************* ok: [node02] TASK [openshift_node : Check for credentials file for registry auth] *********** skipping: [node02] TASK [openshift_node : Create credentials for registry auth] ******************* skipping: [node02] TASK [openshift_node : Create credentials for registry auth (alternative)] ***** skipping: [node02] TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] *** skipping: [node02] TASK [openshift_node : Check that node image is present] *********************** changed: [node02] TASK [openshift_node : Pre-pull node image] ************************************ skipping: [node02] TASK [openshift_node : Copy node script to the node] *************************** ok: [node02] TASK [openshift_node : Install Node service file] ****************************** ok: [node02] TASK [openshift_node : Ensure old system path is set] ************************** skipping: [node02] => (item=/etc/origin/openvswitch) skipping: [node02] => (item=/var/lib/kubelet) skipping: [node02] => (item=/opt/cni/bin) TASK [openshift_node : Check status of node image pre-pull] ******************** skipping: [node02] TASK [openshift_node : Copy node container image to ostree storage] ************ skipping: [node02] TASK [openshift_node : Install or Update node system container] **************** skipping: [node02] TASK [openshift_node : Restart network manager to ensure networking configuration is in place] *** skipping: [node02] TASK [openshift_node : Configure Node settings] ******************************** ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='}) ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'}) ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'}) TASK [openshift_node : Configure Proxy Settings] ******************************* skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='}) skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='}) skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'}) TASK [openshift_node : file] *************************************************** skipping: [node02] TASK [openshift_node : Create the Node config] ********************************* changed: [node02] TASK [openshift_node : Configure Node Environment Variables] ******************* TASK [openshift_node : Ensure the node static pod directory exists] ************ changed: [node02] TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : Check status of node image pre-pull] 
******************** skipping: [node02] TASK [openshift_node : Install NFS storage plugin dependencies] **************** ok: [node02] TASK [openshift_node : Check for existence of nfs sebooleans] ****************** ok: [node02] => (item=virt_use_nfs) ok: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:37.318557', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.023125', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 02:26:37.295432', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:38.549961', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.016188', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 02:26:38.533773', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:37.318557', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.023125', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 02:26:37.295432', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:38.549961', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.016188', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 
02:26:38.533773', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install GlusterFS storage plugin dependencies] ********** ok: [node02] TASK [openshift_node : Check for existence of fusefs sebooleans] *************** ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:44.550619', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017466', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 02:26:44.533153', '_ansible_ignore_errors': None, 'failed': False}) ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:45.689803', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.016336', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 02:26:45.673467', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:44.550619', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017466', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 02:26:44.533153', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-03 02:26:45.689803', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.016336', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, 
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-03 02:26:45.673467', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': u's0', 'seuser': u'unconfined_u', 'serole': u'object_r', 'ctime': 1531032437.8490183, 'state': u'file', 'gid': 0, 'mode': u'0644', 'mtime': 1531032437.8490183, 'owner': u'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': u'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use 
the recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node02] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node01] changed: [node02] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the 
bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=67 changed=3 unreachable=0 failed=0 node02 : ok=159 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:03:56) Node Preparation : Complete (0:04:09) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5649 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 25d v1.10.0+b81c8f8 node02 Ready compute 52s v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 25d v1.10.0+b81c8f8 node02 Ready compute 52s v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
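For context, the readiness gate that ran just before this build step reduces to two checks against the node list; a minimal bash sketch of the same pattern, using the cluster/kubectl.sh wrapper seen in the trace (the job script's exact retry and error handling may differ):

  # 1) the node list must be retrievable at all
  if ! cluster/kubectl.sh get nodes --no-headers; then
      echo 'Cluster is not ready' >&2; exit 1
  fi
  # 2) no node may still report NotReady
  if cluster/kubectl.sh get nodes --no-headers | grep NotReady; then
      echo 'Some nodes are still NotReady' >&2; exit 1
  fi
  echo 'Nodes are ready:'
  cluster/kubectl.sh get nodes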
Untagged: localhost:32976/kubevirt/virt-controller:devel Untagged: localhost:32976/kubevirt/virt-controller@sha256:7bfb57bf24e24a446e154469bab7ad718d7c04057b282954ca930de53cc14b05 Deleted: sha256:7e518a618cab4e72e446b1c3a8f9bcd53e91609e857dab5040d34c6cb7479ef8 Deleted: sha256:d753637904744fe7069332947af4a238441f3a3c3289f858e50354d32834c07e Deleted: sha256:a26566821cf96b1beb89332ca4001454125537e865d3c478f492aea0af5e81a4 Deleted: sha256:d0145d791862f03b33704a9164b3c8c4eb80d67190a5a13c4f4c736837a33777 Untagged: localhost:32976/kubevirt/virt-launcher:devel Untagged: localhost:32976/kubevirt/virt-launcher@sha256:af1e1c27932972b27d0dbf0864a37c75793af311f2aea5169e6a8ece71f1debc Deleted: sha256:71969c5230f0d8deeb5a02e7e3571750ebe664662cda84bee97e0f3df9e89991 Deleted: sha256:acf67ef4448095236e0aed9e54b9b69710bbcecfc697e6b788be262b889a003e Deleted: sha256:939c38a59c426787ae4adcfce8a466b60202215df43ede9dba24d10481b78c31 Deleted: sha256:e64f54a023ef6478c46e074fdebfbc1d2ce308d7b0022e68dafe674c6585b70d Deleted: sha256:93c49d013f830afda870cacbdd37cc4cb2d23c811060cd05640cd284ce6afe4b Deleted: sha256:d9463ef5776caecf9ef736995bfe179100f719a1ee5f73e69efd2fc01e012e77 Deleted: sha256:fdc1ab680a4bed6df9726d3dbe8a75c72a5ac674f67c24f36c1366dac8644ac3 Deleted: sha256:06c5efcd48f50d2ef65db7f3bb0b9057495af1c9633dee7ead27c8e3bead0493 Deleted: sha256:6694d510036a98100e02c3438b6d28ea0928a16e11b7a78de7da8db94f9a60b7 Deleted: sha256:7e009c2b6f3abad717ccfd268f51f4beb7bc18ee67c41e28ead8723b938f5f2d Deleted: sha256:39f2806e234601d6edc8b5673b098208cf5ad28ea48c2f879f74508d88a8290e Deleted: sha256:c59e073a089d4f581a2ae1d44678a1c9493e26c1b184eff1dca447103751cd5e Untagged: localhost:32976/kubevirt/virt-handler:devel Untagged: localhost:32976/kubevirt/virt-handler@sha256:dc5f067a1a585e5815998ef6b0d4298eae13ad50cb6ef1232cc9ceb6a3ac95ce Deleted: sha256:9a65de8a30547172977aa916e1e2cbd6c508041bc511cf8afbcabc5718ebdce6 Deleted: sha256:b3763ec434d90dd40d10972210b11fe7432702e7d45a6c60d75875a3d7bd10c5 Deleted: sha256:161d09d650726d5e96a87b78ec23a9fc5d9b53d343c436e371de1e4e37c095fb Deleted: sha256:a7fd294c74fd29ea20ba26062cff98e850d19452f4de6f024d822283471390ac Untagged: localhost:32976/kubevirt/virt-api:devel Untagged: localhost:32976/kubevirt/virt-api@sha256:4c54fdab1dffb3a8989d10865bf76a121d16f9ba2aa3705d615e50b75410a8d2 Deleted: sha256:4da55b08a9a4f496c30f52381ca93132f8a0c037de5d779f0ac80727869bd2c3 Deleted: sha256:174abdc7ec094bf22532889e0853ab8ff2fbcde4edc2e0c99e3dd5939129a108 Deleted: sha256:6110e703ad3263b29ab264deb3a1293050b7c6a5337755be4695e87a1fdde524 Deleted: sha256:1c5daac95f923b850abdef974fd16131291631d5dab7c7ee9a4c2932ba9a63a3 Untagged: localhost:32976/kubevirt/subresource-access-test:devel Untagged: localhost:32976/kubevirt/subresource-access-test@sha256:ce8b3466877e397b53f6f2c476af3a50728050313997cf5177843d4287abccd2 Deleted: sha256:2c9067c13c407c9041dc6757b82f5649a55b5e9cfb8fb7ad0c708e7e78d983c5 Deleted: sha256:8ccf4a5d719529dedbd8d030b17f0c88baaf5013b42a743cdacd6ee77a99e338 Deleted: sha256:959cee72556fe48bf3d3a270334ee04f377ccfaa2e235ee3ebb0076d7c7715f9 Deleted: sha256:4f6349609aa73db3b7ac84cbeff768a25777557bc0156ffa2332fc1e1e018772 Untagged: localhost:32976/kubevirt/example-hook-sidecar:devel Untagged: localhost:32976/kubevirt/example-hook-sidecar@sha256:9ce84f109967756e33a852aacfcccc785e790ac72ae190ebd42ab8d11c56d1a8 Deleted: sha256:eccc039feba8565f7ef47bdeb7bdaeae08eb22c9c9610104897598ebe43984fc Deleted: sha256:c18dc82dc27a10f6e629044979a6b6a218e3eeb60392c9d78ade224db63ff8a8 Deleted: 
sha256:0436fba9d8ad62a41277308ad838fbf44623b32ea3fa79ea302f6735c36ae007 Deleted: sha256:7e75d5880efca91c36e35a4f57a859c1cdabf7a7508e4dd8a0c76c517b48a1cd sha256:ceba12cbc33e4e37a707840478a630db561e2427b78c8c9f9cd6d0b73276ab32 go version go1.10 linux/amd64 Waiting for rsyncd to be ready go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:ceba12cbc33e4e37a707840478a630db561e2427b78c8c9f9cd6d0b73276ab32 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.4 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b4f3251c6468 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> 813752072d9d Step 5/8 : USER 1001 ---> Using cache ---> 88b3556f36b4 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> d4d2096298b5 Removing intermediate container 79e26c9adcc3 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in 9ec9b642341d ---> cfb62fe4c670 Removing intermediate container 9ec9b642341d Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-controller" '' ---> Running in 758d1263a750 ---> b0d6a04a8bae Removing intermediate container 758d1263a750 Successfully built b0d6a04a8bae Sending build context to Docker daemon 43.32 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 09010a005182 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> fc9481693838 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> 2f059d7dc2c0 Removing intermediate container fd6626d87d8d Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> 9ce379e521ef Removing intermediate container 3c62e6fa8b9d Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in f84839b36ed3  ---> c1f1ece3896c Removing intermediate container f84839b36ed3 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in a2da17c130f4  ---> b81fe3efbbb9 Removing intermediate container a2da17c130f4 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> ee6b70667536 Removing intermediate container 457d2872b178 Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Running in 5b1f24c2aa0e ---> 32b14033991d Removing intermediate container 5b1f24c2aa0e Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-launcher" '' ---> Running in 9f77680e8629 ---> 863547f0bf64 Removing intermediate container 9f77680e8629 Successfully built 863547f0bf64 Sending build context to Docker daemon 41.7 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> 113b3b906ee3 Removing intermediate container d3cc5ad562b0 Step 4/5 : ENTRYPOINT 
/usr/bin/virt-handler ---> Running in e78d44dafa3e ---> 18a6292d3141 Removing intermediate container e78d44dafa3e Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-handler" '' ---> Running in 2ab59134f4e5 ---> 18f7506acffe Removing intermediate container 2ab59134f4e5 Successfully built 18f7506acffe Sending build context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> 3cff23506e80 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> e94c5606b96b Step 5/8 : USER 1001 ---> Using cache ---> af16317199f5 Step 6/8 : COPY virt-api /usr/bin/virt-api ---> 408503166a81 Removing intermediate container c4486c10874b Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in 494c8f11ec43 ---> e2e4d257e875 Removing intermediate container 494c8f11ec43 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-api" '' ---> Running in c952961167ff ---> 557dd63377c6 Removing intermediate container c952961167ff Successfully built 557dd63377c6 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/7 : ENV container docker ---> Using cache ---> aed3ca4ac3a3 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> c7d0cf8fc982 Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> 0393e5ee0c37 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 23798f49dea3 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 94ce40445be4 Successfully built 94ce40445be4 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/5 : ENV container docker ---> Using cache ---> aed3ca4ac3a3 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> d8c990eaf575 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "vm-killer" '' ---> Using cache ---> 959b46fa16f1 Successfully built 959b46fa16f1 Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 50fc79ebe51c Step 3/7 : ENV container docker ---> Using cache ---> b8e063496923 Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 8adb1572b35c Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 8c0c5a52e4df Step 6/7 : CMD /entry-point.sh ---> Using cache ---> 1a4b838e5dee Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "registry-disk-v1alpha" '' ---> Using cache ---> 6cd99eac1b26 Successfully built 6cd99eac1b26 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33310/kubevirt/registry-disk-v1alpha:devel ---> 6cd99eac1b26 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> daf02fbc6053 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> 7dad63217a8a Step 4/4 : LABEL "cirros-registry-disk-demo" '' 
"kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 4040e403422c Successfully built 4040e403422c Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33310/kubevirt/registry-disk-v1alpha:devel ---> 6cd99eac1b26 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2b3f11794cd2 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> 42ceecc2fbea Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 5f0ffa61a701 Successfully built 5f0ffa61a701 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33310/kubevirt/registry-disk-v1alpha:devel ---> 6cd99eac1b26 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2b3f11794cd2 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 48562b170460 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 414ca08aaecd Successfully built 414ca08aaecd Sending build context to Docker daemon 35.59 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> d74088d7a4fc Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> c8c857bf8d96 Step 5/8 : USER 1001 ---> Using cache ---> 36730a67b946 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> da784bb1183a Removing intermediate container 792df57cb886 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in e96fdc75b06c ---> 2fdf79d175fa Removing intermediate container e96fdc75b06c Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "subresource-access-test" '' ---> Running in 426113f808c8 ---> 9ed756b642ea Removing intermediate container 426113f808c8 Successfully built 9ed756b642ea Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/9 : ENV container docker ---> Using cache ---> aed3ca4ac3a3 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> 6050b24a5d85 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0447d2178073 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 291db82d955f Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 793556477837 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> fd5c6e1f9461 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "winrmcli" '' ---> Using cache ---> a223dce236ce Successfully built a223dce236ce Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 71a8c548e503 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> ad4c9b7f522c Removing intermediate container b904d06de507 Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in cfa9d9ba8d56 ---> b11954cae37a Removing intermediate container cfa9d9ba8d56 
Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 6e1e99e78656 ---> 844f9405efc6 Removing intermediate container 6e1e99e78656 Successfully built 844f9405efc6 hack/build-docker.sh push The push refers to a repository [localhost:33310/kubevirt/virt-controller] b9b10fc1816e: Preparing b2f5abdac324: Preparing 891e1e4ef82a: Preparing b2f5abdac324: Pushed b9b10fc1816e: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:96aef727a61cd744518d7344afb687bc32cb835a94a58e2a6504ff27381be89e size: 949 The push refers to a repository [localhost:33310/kubevirt/virt-launcher] ca9194a6c3bb: Preparing 743283a54f36: Preparing 75da6df61510: Preparing af82b63fd066: Preparing 74ee68ed9ba2: Preparing 0b99c4111657: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 5eefb9960a36: Preparing 891e1e4ef82a: Preparing 5eefb9960a36: Waiting 186d8b3e4fd8: Waiting 0b99c4111657: Waiting da38cf808aa5: Waiting 891e1e4ef82a: Waiting b83399358a92: Waiting fa6154170bf5: Waiting 743283a54f36: Pushed af82b63fd066: Pushed ca9194a6c3bb: Pushed da38cf808aa5: Pushed b83399358a92: Pushed fa6154170bf5: Pushed 186d8b3e4fd8: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 75da6df61510: Pushed 0b99c4111657: Pushed 74ee68ed9ba2: Pushed 5eefb9960a36: Pushed devel: digest: sha256:1e02f45a98d3b4989ed6faec8798641aaeaf77793f7967923da497e1b8b8ccc9 size: 2828 The push refers to a repository [localhost:33310/kubevirt/virt-handler] 7164234f36d5: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher 7164234f36d5: Pushed devel: digest: sha256:dc8c809423b39bba588ac1b7532aa509848be5036d76957589d5d0418403425a size: 741 The push refers to a repository [localhost:33310/kubevirt/virt-api] 2188f73e8892: Preparing afd1d781e4d1: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler afd1d781e4d1: Pushed 2188f73e8892: Pushed devel: digest: sha256:75e48ce434cdd01a39967002f255a82624956f68aab8443766ff91e2e57ffa26 size: 948 The push refers to a repository [localhost:33310/kubevirt/disks-images-provider] dc0875c44573: Preparing 8fc77a44094f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api dc0875c44573: Pushed 8fc77a44094f: Pushed devel: digest: sha256:6983cf788a7b820c1e35fcae3ab69f0c6bf8246bd9b125048ab46c757eff9e58 size: 948 The push refers to a repository [localhost:33310/kubevirt/vm-killer] d1b69e768421: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider d1b69e768421: Pushed devel: digest: sha256:a51ee0305cb1255dac598ac872707bce340d95ba5394dc3fac22ce1ec3178090 size: 740 The push refers to a repository [localhost:33310/kubevirt/registry-disk-v1alpha] 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 2a15632f54d4: Pushed 91a924e03d7c: Pushed 25edbec0eaea: Pushed devel: digest: sha256:92625a01ad38ccbd20daaef348bee72b98b522337c3fe8d9d464164226ef71d6 size: 948 The push refers to a repository [localhost:33310/kubevirt/cirros-registry-disk-demo] 8679079ce47d: Preparing 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 91a924e03d7c: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 2a15632f54d4: Mounted from kubevirt/registry-disk-v1alpha 8679079ce47d: Pushed devel: digest: sha256:61bd0de23218ace144f2feed65e671e3ca047a9699f1bea4f5ad6f0e600591c6 size: 1160 The push refers to a repository 
[localhost:33310/kubevirt/fedora-cloud-registry-disk-demo] 6aaf09f3f0fe: Preparing 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 91a924e03d7c: Mounted from kubevirt/cirros-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 2a15632f54d4: Mounted from kubevirt/cirros-registry-disk-demo 6aaf09f3f0fe: Pushed devel: digest: sha256:28087ba777a949173f9c960afae98fce1ff6454b81d6bf707360697f8c025290 size: 1161 The push refers to a repository [localhost:33310/kubevirt/alpine-registry-disk-demo] 5013b58347dc: Preparing 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo 2a15632f54d4: Mounted from kubevirt/fedora-cloud-registry-disk-demo 91a924e03d7c: Mounted from kubevirt/fedora-cloud-registry-disk-demo 5013b58347dc: Pushed devel: digest: sha256:b169183e203b28522665168fa28ee18fa3e8a957d8590ec8d01087333c0c2c08 size: 1160 The push refers to a repository [localhost:33310/kubevirt/subresource-access-test] 515c15e76414: Preparing 4052ce9d0aff: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 4052ce9d0aff: Pushed 515c15e76414: Pushed devel: digest: sha256:450c8314ddea010b5e2e5463335d36c07483702eed19e0343b240b686652f11e size: 948 The push refers to a repository [localhost:33310/kubevirt/winrmcli] 64ccc7ac4271: Preparing 4242962b50c3: Preparing 0e374d8c733e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test 64ccc7ac4271: Pushed 0e374d8c733e: Pushed 4242962b50c3: Pushed devel: digest: sha256:ac4caed4b55aa84f3428119d18dc8366d2b793673fba9149ee91174454708ac1 size: 1165 The push refers to a repository [localhost:33310/kubevirt/example-hook-sidecar] 17d554b9fc97: Preparing 39bae602f753: Preparing 17d554b9fc97: Pushed 39bae602f753: Pushed devel: digest: sha256:afac148f2e1bd248821ff21d14652dc1c5184deeab4950edc23d2f88d895c08b size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ 
provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-185-g68abd3e ++ KUBEVIRT_VERSION=v0.7.0-185-g68abd3e + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33310/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... 
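The cleanup that follows deletes KubeVirt objects by the kubevirt.io label selector across both configured namespaces; a condensed sketch of the pattern (the actual clean.sh issues a separate delete per resource type, as the trace below shows, and additionally checks for the legacy offlinevirtualmachines CRD):

  export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
  for ns in default kube-system; do
      # everything deployed for KubeVirt carries the kubevirt.io label
      cluster/os-3.10.0/.kubectl -n "$ns" delete \
          apiservices,deployment,rs,services,validatingwebhookconfiguration,secrets,pv,pvc,ds,customresourcedefinitions,pods,clusterrolebinding,rolebinding,roles,clusterroles,serviceaccounts \
          -l kubevirt.io
  done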
+ cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete 
clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n 
kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ wc -l ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ 
APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-185-g68abd3e ++ KUBEVIRT_VERSION=v0.7.0-185-g68abd3e + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33310/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... 
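The deploy step below boils down to three actions: create the release manifests while skipping demo content, create the testing manifests recursively, and, because this is an OpenShift provider, grant the privileged SCC to the KubeVirt service accounts. A compressed sketch using the same _kubectl wrapper and variables from the trace:

  # release manifests, skipping anything matching *demo*
  for manifest in "${MANIFESTS_OUT_DIR}"/release/*; do
      [[ $manifest =~ .*demo.* ]] && continue
      _kubectl create -f "$manifest"
  done
  # testing manifests (PVs, PVCs, disks-images-provider, test service account)
  _kubectl create -f "${MANIFESTS_OUT_DIR}/testing" -R
  # OpenShift only: privileged SCC for the KubeVirt service accounts and admin
  for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
      _kubectl adm policy add-scc-to-user privileged -z "$sa" -n kube-system
  done
  _kubectl adm policy add-scc-to-user privileged admin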
+ [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n 
kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n kube-system --no-headers + '[' -n 'disks-images-provider-mjqgd 0/1 ContainerCreating 0 2s disks-images-provider-n7d9r 0/1 ContainerCreating 0 2s virt-api-7d79764579-8d5mf 0/1 ContainerCreating 0 3s virt-api-7d79764579-qtnl5 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-4dwj2 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-88mrf 0/1 ContainerCreating 0 3s virt-handler-97tqs 0/1 ContainerCreating 0 3s virt-handler-hmjbh 0/1 ContainerCreating 0 3s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
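The adm policy calls above are needed because OpenShift's default security context constraints would otherwise reject the elevated privileges KubeVirt's components request (host devices such as /dev/kvm and tun are exposed by virt-handler, as its device-plugin log lines later in this run show). A standalone equivalent of those grants, assuming the oc client and the kube-system namespace used in this run:

    # add the KubeVirt service accounts to the privileged SCC (mirrors the trace above)
    for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
        oc adm policy add-scc-to-user privileged -z "$sa" -n kube-system
    done
    # the run also adds the plain "admin" user to the privileged SCC
    oc adm policy add-scc-to-user privileged admin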
+ kubectl get pods -n kube-system --no-headers + cluster/kubectl.sh get pods -n kube-system --no-headers + grep -v Running disks-images-provider-mjqgd 0/1 ContainerCreating 0 2s disks-images-provider-n7d9r 0/1 ContainerCreating 0 2s virt-api-7d79764579-8d5mf 0/1 ContainerCreating 0 3s virt-api-7d79764579-qtnl5 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-4dwj2 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-88mrf 0/1 ContainerCreating 0 3s virt-handler-97tqs 0/1 ContainerCreating 0 3s virt-handler-hmjbh 0/1 ContainerCreating 0 3s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n kube-system --no-headers + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n kube-system + cluster/kubectl.sh get pods -n kube-system NAME READY STATUS RESTARTS AGE disks-images-provider-mjqgd 1/1 Running 0 33s disks-images-provider-n7d9r 1/1 Running 0 33s master-api-node01 1/1 Running 1 25d master-controllers-node01 1/1 Running 1 25d master-etcd-node01 1/1 Running 1 25d virt-api-7d79764579-8d5mf 1/1 Running 0 34s virt-api-7d79764579-qtnl5 1/1 Running 1 34s virt-controller-7d57d96b65-4dwj2 1/1 Running 0 34s virt-controller-7d57d96b65-88mrf 1/1 Running 0 34s virt-handler-97tqs 1/1 Running 0 34s virt-handler-hmjbh 1/1 Running 0 34s + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n default --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n default --no-headers + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false + '[' -n '' ']' + kubectl get pods -n default + cluster/kubectl.sh get pods -n default NAME READY STATUS RESTARTS AGE docker-registry-1-rl562 1/1 Running 1 25d registry-console-1-rw9zf 1/1 Running 1 25d router-1-6cch9 1/1 Running 1 25d + kubectl version + cluster/kubectl.sh version oc v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 features: Basic-Auth GSSAPI Kerberos SPNEGO Server https://127.0.0.1:33307 openshift v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 + ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + [[ openshift-3.10-release =~ windows.* ]] + FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + make functest hack/dockerized "hack/build-func-tests.sh" sha256:ceba12cbc33e4e37a707840478a630db561e2427b78c8c9f9cd6d0b73276ab32 go version go1.10 linux/amd64 Waiting for rsyncd to be ready go version go1.10 linux/amd64 Compiling tests... 
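The deployment gate that completed above is two polling loops per namespace: first wait until no pod reports a non-Running status, then wait until no container reports ready=false, sampling every 30 s with a 300 s budget. A minimal sketch of that gate (variable names follow the trace; error messages trimmed):

    timeout=300
    sample=30
    for ns in kube-system default; do
        # 1) no pod may be in a non-Running phase
        current_time=0
        while [ -n "$(kubectl get pods -n "$ns" --no-headers | grep -v Running)" ]; do
            echo "Waiting for kubevirt pods to enter the Running state ..."
            sleep "$sample"
            current_time=$((current_time + sample))
            if [ "$current_time" -gt "$timeout" ]; then exit 1; fi
        done
        # 2) every container must report ready=true
        current_time=0
        while [ -n "$(kubectl get pods -n "$ns" --no-headers \
                -ocustom-columns='status:status.containerStatuses[*].ready' \
                | grep false)" ]; do
            sleep "$sample"
            current_time=$((current_time + sample))
            if [ "$current_time" -gt "$timeout" ]; then exit 1; fi
        done
    done

The FUNC_TEST_ARGS variable set just before make functest carries the extra ginkgo flags; presumably the same mechanism can also pass --ginkgo.focus=<pattern> to re-run only a failing spec locally.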
compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1533263863 Will run 151 of 151 specs •• Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running level=info timestamp=2018-08-03T02:42:00.164346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:42:04 http: TLS handshake error from 10.129.0.1:43038: EOF level=info timestamp=2018-08-03T02:42:04.272261Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:42:10.214849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:42:14 http: TLS handshake error from 10.129.0.1:43046: EOF level=info timestamp=2018-08-03T02:42:19.261622Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:42:19.264103Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:42:20.266570Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:42:24 http: TLS handshake error from 10.129.0.1:43054: EOF level=info timestamp=2018-08-03T02:42:30.313097Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:42:34 http: TLS handshake error from 10.129.0.1:43062: EOF level=info timestamp=2018-08-03T02:42:34.338444Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:42:40.365205Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:42:41.131225Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:42:44 http: TLS handshake error from 10.129.0.1:43070: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running level=info timestamp=2018-08-03T02:40:41.135318Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:40:45 http: TLS handshake error from 10.129.0.1:34768: EOF 2018/08/03 02:40:55 http: TLS handshake error from 10.129.0.1:34776: EOF 2018/08/03 02:41:05 http: TLS handshake error from 10.129.0.1:34784: EOF 
2018/08/03 02:41:15 http: TLS handshake error from 10.129.0.1:34792: EOF 2018/08/03 02:41:25 http: TLS handshake error from 10.129.0.1:34800: EOF 2018/08/03 02:41:35 http: TLS handshake error from 10.129.0.1:34810: EOF level=info timestamp=2018-08-03T02:41:41.074442Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:41:45 http: TLS handshake error from 10.129.0.1:34818: EOF 2018/08/03 02:41:55 http: TLS handshake error from 10.129.0.1:34826: EOF 2018/08/03 02:42:05 http: TLS handshake error from 10.129.0.1:34834: EOF level=info timestamp=2018-08-03T02:42:11.127943Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:42:15 http: TLS handshake error from 10.129.0.1:34842: EOF 2018/08/03 02:42:25 http: TLS handshake error from 10.129.0.1:34850: EOF 2018/08/03 02:42:35 http: TLS handshake error from 10.129.0.1:34858: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T02:37:44.825784Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33cf6fa0-96c6-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmiwhddc was added." level=info timestamp=2018-08-03T02:37:44.829044Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33cf6fa0-96c6-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T02:37:44.829911Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:37:44.831482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33cf6fa0-96c6-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T02:37:44.831518Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:37:44.845745Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:37:44.845788Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:37:44.848393Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:37:44.848449Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:37:44.896757Z 
pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:37:44.896831Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:37:44.904066Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwhddc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwhddc" level=info timestamp=2018-08-03T02:37:44.918012Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:37:44.918110Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhddc kind= uid=33c0c501-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:37:44.925735Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwhddc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwhddc" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmiwhddc-7cxx7 Pod phase: Pending level=info timestamp=2018-08-03T02:37:48.131923Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T02:37:48.133770Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T02:37:48.135788Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T02:37:58.150121Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T02:37:58.198448Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiwhddc" level=info timestamp=2018-08-03T02:37:58.200684Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T02:37:58.200864Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" ------------------------------ • Failure [300.430 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should update VirtualMachine once VMIs are up [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195 Timed out after 300.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:201 ------------------------------ •• Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running level=info timestamp=2018-08-03T02:47:20.265907Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:21.804781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:47:24 http: TLS handshake error from 10.129.0.1:43296: EOF level=info timestamp=2018-08-03T02:47:28.037345Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:28.051455Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:28.062997Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:31.843986Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:47:34 http: TLS handshake error from 10.129.0.1:43306: EOF level=info timestamp=2018-08-03T02:47:34.910002Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:37.091298Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:37.104624Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:37.117097Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:47:41.075660Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T02:47:41.896084Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:47:44 http: TLS handshake error from 10.129.0.1:43314: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running level=info timestamp=2018-08-03T02:45:41.072296Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 
username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:45:45 http: TLS handshake error from 10.129.0.1:35012: EOF 2018/08/03 02:45:55 http: TLS handshake error from 10.129.0.1:35020: EOF 2018/08/03 02:46:05 http: TLS handshake error from 10.129.0.1:35028: EOF level=info timestamp=2018-08-03T02:46:11.100694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:46:15 http: TLS handshake error from 10.129.0.1:35036: EOF 2018/08/03 02:46:25 http: TLS handshake error from 10.129.0.1:35044: EOF 2018/08/03 02:46:35 http: TLS handshake error from 10.129.0.1:35052: EOF 2018/08/03 02:46:45 http: TLS handshake error from 10.129.0.1:35060: EOF 2018/08/03 02:46:55 http: TLS handshake error from 10.129.0.1:35068: EOF 2018/08/03 02:47:05 http: TLS handshake error from 10.129.0.1:35076: EOF 2018/08/03 02:47:15 http: TLS handshake error from 10.129.0.1:35084: EOF 2018/08/03 02:47:25 http: TLS handshake error from 10.129.0.1:35092: EOF 2018/08/03 02:47:35 http: TLS handshake error from 10.129.0.1:35102: EOF 2018/08/03 02:47:45 http: TLS handshake error from 10.129.0.1:35110: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T02:42:49.655937Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Setting stabile UUID '62af9542-2497-5c23-952b-c53475cc56ad' (was '')" level=info timestamp=2018-08-03T02:42:49.666187Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e982e797-96c6-11e8-bbc9-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-08-03T02:42:49.666251Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e982e797-96c6-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmi59h6w was added." 
level=info timestamp=2018-08-03T02:42:49.666283Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:42:49.666316Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:42:49.666866Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e982e797-96c6-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T02:42:49.666945Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e982e797-96c6-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T02:42:49.694726Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:42:49.694841Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:42:49.718375Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:42:49.718457Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:42:49.734422Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi59h6w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi59h6w" level=info timestamp=2018-08-03T02:42:49.753011Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:42:49.753081Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi59h6w kind= uid=e980173f-96c6-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:42:49.765968Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi59h6w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi59h6w" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmi59h6w-4sk5n Pod phase: Pending level=info timestamp=2018-08-03T02:42:53.516952Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T02:42:53.518335Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T02:42:53.520166Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T02:43:03.528080Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T02:43:03.578618Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi59h6w" level=info timestamp=2018-08-03T02:43:03.581618Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T02:43:03.581938Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" ------------------------------ • Failure [300.456 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if it gets deleted [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245 Timed out after 300.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157 ------------------------------ STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 02:49:04 http: TLS handshake error from 10.129.0.1:43378: EOF level=info timestamp=2018-08-03T02:49:05.079660Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:49:12.331081Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:49:14 http: TLS handshake error from 10.129.0.1:43386: EOF level=info timestamp=2018-08-03T02:49:20.748970Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:49:20.758987Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:49:22.373535Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:49:24 http: TLS handshake error from 10.129.0.1:43394: EOF level=info timestamp=2018-08-03T02:49:32.434805Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:49:34 http: TLS handshake error from 10.129.0.1:43402: EOF level=info timestamp=2018-08-03T02:49:35.133709Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:49:40.312318Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T02:49:40.628493Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T02:49:42.524317Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:49:44 http: TLS handshake error from 10.129.0.1:43410: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 02:47:55 http: TLS handshake error from 10.129.0.1:35118: EOF 2018/08/03 02:48:05 http: TLS handshake error from 10.129.0.1:35126: EOF level=info timestamp=2018-08-03T02:48:11.080371Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:48:15 http: TLS handshake error from 10.129.0.1:35134: EOF 2018/08/03 
02:48:25 http: TLS handshake error from 10.129.0.1:35142: EOF 2018/08/03 02:48:35 http: TLS handshake error from 10.129.0.1:35150: EOF 2018/08/03 02:48:45 http: TLS handshake error from 10.129.0.1:35158: EOF 2018/08/03 02:48:55 http: TLS handshake error from 10.129.0.1:35166: EOF 2018/08/03 02:49:05 http: TLS handshake error from 10.129.0.1:35174: EOF level=info timestamp=2018-08-03T02:49:11.107176Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:49:15 http: TLS handshake error from 10.129.0.1:35182: EOF 2018/08/03 02:49:25 http: TLS handshake error from 10.129.0.1:35190: EOF 2018/08/03 02:49:35 http: TLS handshake error from 10.129.0.1:35198: EOF level=info timestamp=2018-08-03T02:49:41.163000Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:49:45 http: TLS handshake error from 10.129.0.1:35206: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T02:47:50.114476Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c97b213-96c7-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T02:47:50.115523Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:47:50.115562Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:47:50.134388Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:47:50.134491Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=error timestamp=2018-08-03T02:47:50.139926Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmibljsh\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachine status failed." 
level=info timestamp=2018-08-03T02:47:50.140004Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmibljsh\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachine kubevirt-test-default/testvmibljsh" level=info timestamp=2018-08-03T02:47:50.140054Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:47:50.140130Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:47:50.145217Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:47:50.145305Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:47:50.172358Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:47:50.172451Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:47:50.190420Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:47:50.190513Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibljsh kind= uid=9c963274-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmibljsh-vczm6 Pod phase: Pending level=info timestamp=2018-08-03T02:47:52.828804Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T02:47:52.830005Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T02:47:52.834045Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T02:48:02.841228Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T02:48:02.888917Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmibljsh" level=info timestamp=2018-08-03T02:48:02.891192Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T02:48:02.891466Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [120.434 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265 Timed out after 120.000s. 
Expected success, but got an error: <*errors.errorString | 0xc420391620>: { s: "vmi still isn't running", } vmi still isn't running /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:284 ------------------------------ STEP: Creating a new VMI STEP: Waiting for the VMI's VirtualMachineInstance to start Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 02:53:54 http: TLS handshake error from 10.129.0.1:43614: EOF level=info timestamp=2018-08-03T02:54:03.861935Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:54:04 http: TLS handshake error from 10.129.0.1:43622: EOF level=info timestamp=2018-08-03T02:54:05.628715Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:54:13.924700Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:54:14 http: TLS handshake error from 10.129.0.1:43630: EOF level=info timestamp=2018-08-03T02:54:21.855052Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:54:21.855819Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:54:23.972764Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:54:24 http: TLS handshake error from 10.129.0.1:43638: EOF level=info timestamp=2018-08-03T02:54:34.023580Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:54:34 http: TLS handshake error from 10.129.0.1:43646: EOF level=info timestamp=2018-08-03T02:54:35.672483Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:54:44 http: TLS handshake error from 10.129.0.1:43654: EOF level=info timestamp=2018-08-03T02:54:44.074016Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 02:52:55 http: TLS handshake error from 10.129.0.1:35360: EOF 2018/08/03 02:53:05 http: TLS handshake error from 10.129.0.1:35368: EOF 2018/08/03 02:53:15 http: TLS handshake error from 10.129.0.1:35376: EOF 2018/08/03 02:53:25 http: TLS handshake error from 10.129.0.1:35384: EOF 2018/08/03 02:53:35 http: TLS handshake error from 10.129.0.1:35394: EOF 
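The three failures above, and the spec whose pod dump starts here, share the same signature: the controller creates the VirtualMachineInstance and its virt-launcher pod, the launcher pod stays in phase Pending even though its own log already reports "Connected to libvirt daemon" and "Marked as ready", and the test's Eventually-style wait for a Running VMI exhausts its timeout ("Expected : false to be true" / "vmi still isn't running"). When chasing this locally, a reasonable next step is to look at why the launcher pod never leaves Pending; a hedged sketch using names taken from the dumps above (substitute the pod from whichever spec is being inspected):

    export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
    ns=kubevirt-test-default

    # VMI objects and their current phase (the CRD was created during the deploy step)
    cluster/os-3.10.0/.kubectl get virtualmachineinstances -n "$ns"

    # a Pending launcher pod usually names its blocker in the describe output
    # (scheduling, image pull, volume/PVC binding, ...)
    cluster/os-3.10.0/.kubectl describe pod virt-launcher-testvmibljsh-vczm6 -n "$ns"

    # namespace events, oldest first, often point at the failing condition directly
    cluster/os-3.10.0/.kubectl get events -n "$ns" --sort-by=.metadata.creationTimestamp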
level=info timestamp=2018-08-03T02:53:41.099858Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:53:45 http: TLS handshake error from 10.129.0.1:35402: EOF 2018/08/03 02:53:55 http: TLS handshake error from 10.129.0.1:35410: EOF 2018/08/03 02:54:05 http: TLS handshake error from 10.129.0.1:35418: EOF level=info timestamp=2018-08-03T02:54:11.091508Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:54:15 http: TLS handshake error from 10.129.0.1:35426: EOF 2018/08/03 02:54:25 http: TLS handshake error from 10.129.0.1:35434: EOF 2018/08/03 02:54:35 http: TLS handshake error from 10.129.0.1:35442: EOF level=info timestamp=2018-08-03T02:54:41.144026Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:54:45 http: TLS handshake error from 10.129.0.1:35450: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T02:49:50.560612Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e4621398-96c7-11e8-bbc9-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-08-03T02:49:50.560715Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e4621398-96c7-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmi6jncn was added." 
level=info timestamp=2018-08-03T02:49:50.560754Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:49:50.560829Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:49:50.563362Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e4621398-96c7-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T02:49:50.563486Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e4621398-96c7-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T02:49:50.577328Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:49:50.577491Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:49:50.582837Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:49:50.582896Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:49:50.628564Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:49:50.628649Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:49:50.635271Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6jncn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6jncn" level=info timestamp=2018-08-03T02:49:50.651748Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:49:50.651843Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6jncn kind= uid=e45ec922-96c7-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmi6jncn-cc2mh Pod phase: Pending level=info timestamp=2018-08-03T02:49:53.859572Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T02:49:53.859851Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T02:49:53.861319Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T02:50:03.869793Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T02:50:03.914431Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6jncn" level=info timestamp=2018-08-03T02:50:03.917444Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T02:50:03.917936Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [300.561 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should stop VirtualMachineInstance if running set to false [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325 Timed out after 300.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157 ------------------------------ STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running level=info timestamp=2018-08-03T02:59:23.018226Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:59:24 http: TLS handshake error from 10.129.0.1:43880: EOF level=info timestamp=2018-08-03T02:59:25.452033Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:30.319531Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:30.334251Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:30.350344Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 02:59:34 http: TLS handshake error from 10.129.0.1:43890: EOF level=info timestamp=2018-08-03T02:59:35.502510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:36.236820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:39.345497Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:39.358789Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:39.371813Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T02:59:40.312307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:59:44 http: TLS handshake error from 10.129.0.1:43898: EOF level=info timestamp=2018-08-03T02:59:45.555531Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 02:57:55 http: TLS 
handshake error from 10.129.0.1:35604: EOF 2018/08/03 02:58:05 http: TLS handshake error from 10.129.0.1:35612: EOF level=info timestamp=2018-08-03T02:58:11.135300Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:58:15 http: TLS handshake error from 10.129.0.1:35620: EOF 2018/08/03 02:58:25 http: TLS handshake error from 10.129.0.1:35628: EOF 2018/08/03 02:58:35 http: TLS handshake error from 10.129.0.1:35636: EOF 2018/08/03 02:58:45 http: TLS handshake error from 10.129.0.1:35644: EOF 2018/08/03 02:58:55 http: TLS handshake error from 10.129.0.1:35652: EOF 2018/08/03 02:59:05 http: TLS handshake error from 10.129.0.1:35660: EOF 2018/08/03 02:59:15 http: TLS handshake error from 10.129.0.1:35668: EOF 2018/08/03 02:59:25 http: TLS handshake error from 10.129.0.1:35676: EOF 2018/08/03 02:59:35 http: TLS handshake error from 10.129.0.1:35686: EOF level=info timestamp=2018-08-03T02:59:40.618466Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T02:59:41.099717Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 02:59:45 http: TLS handshake error from 10.129.0.1:35694: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T02:54:51.127252Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Setting stabile UUID '6ede0a62-9cb7-530e-9f2f-dba2865b3634' (was '')" level=info timestamp=2018-08-03T02:54:51.146180Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=978adf9c-96c8-11e8-bbc9-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-08-03T02:54:51.146267Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=978adf9c-96c8-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmif5r9l was added." 
level=info timestamp=2018-08-03T02:54:51.146318Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=978adf9c-96c8-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T02:54:51.146392Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=978adf9c-96c8-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T02:54:51.147225Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:54:51.147261Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:54:51.161256Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:54:51.161297Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:54:51.161786Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:54:51.161833Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:54:51.197591Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:54:51.197674Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T02:54:51.230337Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T02:54:51.230424Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5r9l kind= uid=9786d524-96c8-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
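
Every VirtualMachine failure in this run ends the same way: a 300s or 360s poll (vm_test.go:157 and similar assertion sites) gives up with "Expected false to be true" while the corresponding virt-launcher pod is still Pending. A minimal sketch of the kind of Gomega poll that produces exactly that output is below; getReady is a placeholder for whatever the real test reads (for example the VirtualMachine's status), not the suite's actual helper.

    package sketch

    import (
    	"time"

    	. "github.com/onsi/gomega"
    )

    // waitForRunningCondition polls a boolean until it flips or the timeout
    // expires. When the condition never becomes true (as in this run), Gomega
    // reports "Timed out after 300.000s ... Expected false to be true".
    func waitForRunningCondition(getReady func() (bool, error)) {
    	Eventually(func() bool {
    		ready, err := getReady()
    		Expect(err).ToNot(HaveOccurred())
    		return ready
    	}, 300*time.Second, 1*time.Second).Should(BeTrue())
    }

The failure message therefore points at the wait, not the cause: the interesting signal is in the pod and controller logs dumped around it.
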
level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmif5r9l-hvddb Pod phase: Pending level=info timestamp=2018-08-03T02:54:54.172348Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T02:54:54.173881Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T02:54:54.176509Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T02:55:04.188182Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T02:55:04.250932Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmif5r9l" level=info timestamp=2018-08-03T02:55:04.255269Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T02:55:04.255635Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [301.480 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should start and stop VirtualMachineInstance multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333 Timed out after 300.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157 ------------------------------ STEP: Doing run: 0 STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 03:05:24 http: TLS handshake error from 10.129.0.1:44172: EOF level=info timestamp=2018-08-03T03:05:24.631231Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:24.633215Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:27.153540Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:31.521799Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:31.545516Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:31.563464Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:05:34 http: TLS handshake error from 10.129.0.1:44182: EOF level=info timestamp=2018-08-03T03:05:36.874614Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:37.198758Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:40.456587Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:40.469964Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:05:40.480256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:05:44 http: TLS handshake error from 10.129.0.1:44190: EOF level=info timestamp=2018-08-03T03:05:47.240360Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: 
virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:03:45 http: TLS handshake error from 10.129.0.1:35888: EOF 2018/08/03 03:03:55 http: TLS handshake error from 10.129.0.1:35896: EOF 2018/08/03 03:04:05 http: TLS handshake error from 10.129.0.1:35904: EOF 2018/08/03 03:04:15 http: TLS handshake error from 10.129.0.1:35912: EOF 2018/08/03 03:04:25 http: TLS handshake error from 10.129.0.1:35920: EOF 2018/08/03 03:04:35 http: TLS handshake error from 10.129.0.1:35928: EOF 2018/08/03 03:04:45 http: TLS handshake error from 10.129.0.1:35936: EOF 2018/08/03 03:04:55 http: TLS handshake error from 10.129.0.1:35944: EOF 2018/08/03 03:05:05 http: TLS handshake error from 10.129.0.1:35952: EOF level=info timestamp=2018-08-03T03:05:11.129780Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:05:15 http: TLS handshake error from 10.129.0.1:35960: EOF 2018/08/03 03:05:25 http: TLS handshake error from 10.129.0.1:35968: EOF 2018/08/03 03:05:35 http: TLS handshake error from 10.129.0.1:35978: EOF level=info timestamp=2018-08-03T03:05:41.099297Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:05:45 http: TLS handshake error from 10.129.0.1:35986: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T03:05:11.696334Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Setting stabile UUID 'dc8f1115-2de4-5109-b09e-f4e9c3fe39e4' (was '')" level=info timestamp=2018-08-03T03:05:11.710671Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=096e285d-96ca-11e8-bbc9-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-08-03T03:05:11.710731Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=096e285d-96ca-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmi7sqmq was added." 
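
The "Setting stabile UUID ... (was '')" entry above shows the controller assigning a deterministic UUID derived from the VMI, presumably so the guest keeps the same firmware UUID across restarts. A hedged illustration of producing such a name-derived (version-5 style) UUID with github.com/google/uuid follows; it illustrates the idea only and is not KubeVirt's actual code, and the namespace/name string is made up.

    package main

    import (
    	"fmt"

    	"github.com/google/uuid"
    )

    func main() {
    	// A fixed namespace plus a stable input yields the same UUID on every
    	// run, which is the property wanted for a stable guest firmware UUID.
    	stable := uuid.NewSHA1(uuid.NameSpaceDNS, []byte("kubevirt-test-default/testvmi7sqmq"))
    	fmt.Println(stable) // identical output for the same input, every time
    }
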
level=info timestamp=2018-08-03T03:05:11.710785Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=096e285d-96ca-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:05:11.710931Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=096e285d-96ca-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:05:11.727255Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:11.727306Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:11.727816Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:11.727873Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:11.739049Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:11.739102Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:11.770761Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:11.770845Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:11.782508Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:11.782564Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7sqmq kind= uid=4b39fda3-96c9-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
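
The one error that recurs in the node01 handler log (virt-handler-hmjbh), "failed to set a cpu manager label on host node01" with reason "open /proc/29783/cmdline: no such file or directory", looks like a race: virt-handler inspects a host process's command line (apparently to detect the kubelet's CPU-manager policy) and the PID it picked has already exited. A hedged sketch of that failure mode, assuming nothing about virt-handler's real implementation beyond the /proc read visible in the message:

    package sketch

    import (
    	"bytes"
    	"fmt"
    	"os"
    )

    // readCmdline returns the argv of a process from /proc. If the PID exits
    // before the read, the call fails with "no such file or directory", which
    // is exactly the reason string logged by virt-handler above.
    func readCmdline(pid int) ([]string, error) {
    	raw, err := os.ReadFile(fmt.Sprintf("/proc/%d/cmdline", pid))
    	if err != nil {
    		return nil, err // e.g. open /proc/29783/cmdline: no such file or directory
    	}
    	// Arguments are NUL-separated in /proc/<pid>/cmdline.
    	parts := bytes.Split(bytes.TrimRight(raw, "\x00"), []byte{0})
    	args := make([]string, 0, len(parts))
    	for _, p := range parts {
    		args = append(args, string(p))
    	}
    	return args, nil
    }

Because the target process can vanish at any moment, callers of this kind of probe normally treat the error as transient rather than fatal, which matches the fact that the handler keeps running afterwards.
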
level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmi7sqmq-8pd5s Pod phase: Pending level=info timestamp=2018-08-03T03:05:15.060721Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:05:15.061840Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:05:15.064472Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:05:25.073076Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:05:25.121079Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi7sqmq" level=info timestamp=2018-08-03T03:05:25.123655Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:05:25.124172Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [360.453 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should not update the VirtualMachineInstance spec if Running [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346 Timed out after 360.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:353 ------------------------------ Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 03:10:14 http: TLS handshake error from 10.129.0.1:44408: EOF level=info timestamp=2018-08-03T03:10:18.601978Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:10:24 http: TLS handshake error from 10.129.0.1:44416: EOF level=info timestamp=2018-08-03T03:10:25.572303Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:10:25.586538Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:10:28.651096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:10:34 http: TLS handshake error from 10.129.0.1:44424: EOF level=info timestamp=2018-08-03T03:10:37.420837Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:10:38.704297Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:10:41.111120Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:10:44 http: TLS handshake error from 10.129.0.1:44432: EOF level=info timestamp=2018-08-03T03:10:48.750654Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:10:53.450841Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:10:53.451975Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/08/03 03:10:54 http: TLS handshake error from 10.129.0.1:44440: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:08:45 http: TLS handshake error from 10.129.0.1:36132: EOF 2018/08/03 03:08:55 http: TLS handshake error from 10.129.0.1:36140: EOF 2018/08/03 03:09:05 http: TLS handshake error from 10.129.0.1:36148: EOF 2018/08/03 03:09:15 http: TLS handshake error from 10.129.0.1:36156: EOF 2018/08/03 03:09:25 http: TLS handshake error from 10.129.0.1:36164: EOF 2018/08/03 03:09:35 http: TLS handshake error from 10.129.0.1:36172: EOF level=info timestamp=2018-08-03T03:09:40.313261Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:09:40.617203Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:09:45 http: TLS handshake error from 10.129.0.1:36180: EOF 2018/08/03 03:09:55 http: TLS handshake error from 10.129.0.1:36188: EOF 2018/08/03 03:10:05 http: TLS handshake error from 10.129.0.1:36196: EOF 2018/08/03 03:10:15 http: TLS handshake error from 10.129.0.1:36204: EOF 2018/08/03 03:10:25 http: TLS handshake error from 10.129.0.1:36212: EOF 2018/08/03 03:10:35 http: TLS handshake error from 10.129.0.1:36220: EOF 2018/08/03 03:10:45 http: TLS handshake error from 10.129.0.1:36228: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T03:05:53.039998Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Setting stabile UUID '24d858e0-65b8-5590-8783-307b88ef7be5' (was '')" level=info timestamp=2018-08-03T03:05:53.052557Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=22127d73-96ca-11e8-bbc9-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-08-03T03:05:53.052707Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=22127d73-96ca-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmi878cr was added." 
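
Most of the virt-api traffic above is the aggregated-API machinery repeatedly discovering /apis/subresources.kubevirt.io/v1alpha2 (each GET returns 200 with a 136-byte APIResourceList); the "TLS handshake error ... EOF" lines every ten seconds are most likely a TCP-level readiness probe or similar client that disconnects before completing the handshake. A hedged client-go sketch of the same discovery call, assuming in-cluster credentials:

    package main

    import (
    	"fmt"

    	"k8s.io/client-go/discovery"
    	"k8s.io/client-go/rest"
    )

    func main() {
    	// In-cluster config; outside a pod you would load a kubeconfig instead.
    	cfg, err := rest.InClusterConfig()
    	if err != nil {
    		panic(err)
    	}
    	dc, err := discovery.NewDiscoveryClientForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	// The same group/version the apiserver keeps probing in the log above.
    	list, err := dc.ServerResourcesForGroupVersion("subresources.kubevirt.io/v1alpha2")
    	if err != nil {
    		panic(err)
    	}
    	for _, r := range list.APIResources {
    		fmt.Println(r.Name) // subresources such as console/VNC endpoints
    	}
    }
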
level=info timestamp=2018-08-03T03:05:53.052840Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:53.052963Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:53.053441Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=22127d73-96ca-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:05:53.053738Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=22127d73-96ca-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:05:53.064464Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:53.064504Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:53.067223Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:53.067267Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:53.109765Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:53.109856Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:05:53.125072Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:05:53.125180Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi878cr kind= uid=220ff272-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmi878cr-bfkrv Pod phase: Pending level=info timestamp=2018-08-03T03:05:56.725672Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:05:56.726538Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:05:56.728615Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:06:06.734915Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:06:06.780604Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi878cr" level=info timestamp=2018-08-03T03:06:06.784868Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:06:06.785115Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [301.462 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should survive guest shutdown, multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387 Timed out after 300.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157 ------------------------------ STEP: Creating new VMI, not running STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition VM testvmifwhqb was scheduled to start Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running level=info timestamp=2018-08-03T03:16:07.997737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:16:10.350655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:16:14 http: TLS handshake error from 10.129.0.1:44700: EOF level=info timestamp=2018-08-03T03:16:20.406708Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:16:24 http: TLS handshake error from 10.129.0.1:44708: EOF level=info timestamp=2018-08-03T03:16:26.959115Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:16:26.964749Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:16:30.451720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:16:34 http: TLS handshake error from 10.129.0.1:44716: EOF level=info timestamp=2018-08-03T03:16:38.056218Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:16:40.498015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:16:41.097219Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:16:44 http: TLS handshake error from 10.129.0.1:44724: EOF level=info timestamp=2018-08-03T03:16:50.547971Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:16:54 http: TLS handshake error from 10.129.0.1:44732: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running level=info timestamp=2018-08-03T03:14:41.144764Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:14:45 http: TLS handshake error from 10.129.0.1:36424: EOF 2018/08/03 03:14:55 http: TLS 
handshake error from 10.129.0.1:36432: EOF 2018/08/03 03:15:05 http: TLS handshake error from 10.129.0.1:36440: EOF 2018/08/03 03:15:15 http: TLS handshake error from 10.129.0.1:36448: EOF 2018/08/03 03:15:25 http: TLS handshake error from 10.129.0.1:36456: EOF 2018/08/03 03:15:35 http: TLS handshake error from 10.129.0.1:36464: EOF 2018/08/03 03:15:45 http: TLS handshake error from 10.129.0.1:36472: EOF 2018/08/03 03:15:55 http: TLS handshake error from 10.129.0.1:36480: EOF 2018/08/03 03:16:05 http: TLS handshake error from 10.129.0.1:36488: EOF level=info timestamp=2018-08-03T03:16:11.089309Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:16:15 http: TLS handshake error from 10.129.0.1:36496: EOF 2018/08/03 03:16:25 http: TLS handshake error from 10.129.0.1:36504: EOF 2018/08/03 03:16:35 http: TLS handshake error from 10.129.0.1:36512: EOF 2018/08/03 03:16:45 http: TLS handshake error from 10.129.0.1:36520: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T03:16:16.699420Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:16:16.699472Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Setting stabile UUID '816d9d31-ee74-52e9-9aec-2f3ae8129061' (was '')" level=info timestamp=2018-08-03T03:16:16.711548Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=95cd6d36-96cb-11e8-bbc9-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-08-03T03:16:16.711592Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=95cd6d36-96cb-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmifwhqb was added." 
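
The repeated "Started processing VM" / "Creating or the VirtualMachineInstance: true" pairs in the virt-controller log are the VM controller re-running its reconcile for testvmifwhqb each time the VM or its VMI changes, and each time concluding that a VirtualMachineInstance should exist because the VM is set to run. A stripped-down sketch of that decision, with hypothetical types standing in for the real API structs:

    package sketch

    // Hypothetical, trimmed-down stand-ins for the real API types.
    type VirtualMachine struct {
    	Name    string
    	Running bool // spec.running in the real object
    }

    type VirtualMachineInstance struct {
    	Name string
    }

    // reconcile mirrors the decision logged above: when running is true and no
    // VMI exists yet, create one; when running is false, delete any existing VMI.
    func reconcile(vm VirtualMachine, existing *VirtualMachineInstance,
    	create func(VirtualMachineInstance) error, remove func(string) error) error {
    	switch {
    	case vm.Running && existing == nil:
    		return create(VirtualMachineInstance{Name: vm.Name})
    	case !vm.Running && existing != nil:
    		return remove(existing.Name)
    	default:
    		return nil // desired state already holds; nothing to do
    	}
    }

In this run the creation side keeps succeeding (the virt-launcher pods exist) but the instances never leave Pending, so the controller keeps logging the same decision without the test's condition ever becoming true.
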
level=info timestamp=2018-08-03T03:16:16.711637Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=95cd6d36-96cb-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:16:16.711716Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=95cd6d36-96cb-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:16:16.734781Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:16:16.734828Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:16:16.752962Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:16:16.753016Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:16:16.774120Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:16:16.774209Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:16:16.775920Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwhqb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwhqb" level=info timestamp=2018-08-03T03:16:16.804621Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:16:16.804705Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwhqb kind= uid=d5bf5beb-96ca-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmifwhqb-d2mqd Pod phase: Pending level=info timestamp=2018-08-03T03:16:19.894419Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:16:19.894702Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:16:19.896240Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:16:29.903467Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:16:29.947116Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmifwhqb" level=info timestamp=2018-08-03T03:16:29.950167Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:16:29.950418Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [360.445 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should start a VirtualMachineInstance once [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 Timed out after 360.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:453 ------------------------------ STEP: getting an VMI STEP: Invoking virtctl start STEP: Getting the status of the VMI Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 03:22:14 http: TLS handshake error from 10.129.0.1:44992: EOF level=info timestamp=2018-08-03T03:22:22.100853Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:22:24 http: TLS handshake error from 10.129.0.1:45000: EOF level=info timestamp=2018-08-03T03:22:28.497637Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:22:28.500759Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:22:32.140037Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:22:34 http: TLS handshake error from 10.129.0.1:45008: EOF level=info timestamp=2018-08-03T03:22:38.747426Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:22:41.134844Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:22:42.187317Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:22:44 http: TLS handshake error from 10.129.0.1:45016: EOF level=info timestamp=2018-08-03T03:22:52.237407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:22:53.653637Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:22:53.654739Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/08/03 03:22:54 http: TLS handshake error from 10.129.0.1:45024: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:20:45 http: TLS handshake error from 10.129.0.1:36716: EOF 2018/08/03 03:20:55 http: TLS handshake error from 10.129.0.1:36724: EOF 2018/08/03 03:21:05 http: TLS handshake error from 10.129.0.1:36732: EOF level=info timestamp=2018-08-03T03:21:11.104346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:21:15 http: TLS 
handshake error from 10.129.0.1:36740: EOF 2018/08/03 03:21:25 http: TLS handshake error from 10.129.0.1:36748: EOF 2018/08/03 03:21:35 http: TLS handshake error from 10.129.0.1:36756: EOF level=info timestamp=2018-08-03T03:21:41.143591Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:21:45 http: TLS handshake error from 10.129.0.1:36764: EOF 2018/08/03 03:21:55 http: TLS handshake error from 10.129.0.1:36772: EOF 2018/08/03 03:22:05 http: TLS handshake error from 10.129.0.1:36780: EOF 2018/08/03 03:22:15 http: TLS handshake error from 10.129.0.1:36788: EOF 2018/08/03 03:22:25 http: TLS handshake error from 10.129.0.1:36796: EOF 2018/08/03 03:22:35 http: TLS handshake error from 10.129.0.1:36804: EOF 2018/08/03 03:22:45 http: TLS handshake error from 10.129.0.1:36812: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T02:37:05.382641Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-88mrf Pod phase: Running level=info timestamp=2018-08-03T03:22:19.708819Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=6e2a5135-96cc-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:22:19.708934Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=6e2a5135-96cc-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:22:19.708738Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=6e2a5135-96cc-11e8-bbc9-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-08-03T03:22:19.709452Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=6e2a5135-96cc-11e8-bbc9-525500d15501 msg="VirtualMachineInstance created bacause testvmiqv8p4 was added." 
level=info timestamp=2018-08-03T03:22:19.721809Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:22:19.721887Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:22:19.732411Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:22:19.732481Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:22:19.751110Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:22:19.751183Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:22:19.773280Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:22:19.773349Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:22:19.795004Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Started processing VM" level=info timestamp=2018-08-03T03:22:19.795062Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqv8p4 kind= uid=ac9700fd-96cb-11e8-bbc9-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-08-03T03:22:19.795360Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqv8p4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqv8p4" Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T02:37:07.058121Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-03T02:37:07.072121Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
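
The "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ... the object has been modified" entries are ordinary optimistic-concurrency conflicts: the controller wrote an object whose resourceVersion had moved underneath it, so it reenqueues the key and reconciles again. For ad-hoc code hitting the same 409 Conflict, the standard client-go pattern is RetryOnConflict; a hedged sketch with a placeholder update function:

    package sketch

    import (
    	"k8s.io/client-go/util/retry"
    )

    // updateWithRetry re-runs a get-modify-update cycle whenever the apiserver
    // answers with a 409 Conflict ("the object has been modified"), the same
    // condition the controller above handles by reenqueuing the key.
    func updateWithRetry(getAndUpdate func() error) error {
    	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
    		// getAndUpdate is a placeholder: it should fetch the latest object,
    		// apply the change, and issue the Update; on conflict it runs again.
    		return getAndUpdate()
    	})
    }

These conflicts are logged at info level and are harmless on their own; they do not explain why the instances here stay Pending.
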
level=info timestamp=2018-08-03T02:37:07.074218Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:07.173500Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:07.185432Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:07.187158Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T02:37:05.168290Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-03T02:37:05.190468Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-03T02:37:05.195930Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-03T02:37:05.295904Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-03T02:37:05.562268Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-03T02:37:05.563222Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=error timestamp=2018-08-03T02:37:05.867110Z pos=vm.go:800 component=virt-handler reason="open /proc/29783/cmdline: no such file or directory" msg="failed to set a cpu manager label on host node01" Pod name: virt-launcher-testvmiqv8p4-jtkcl Pod phase: Pending level=info timestamp=2018-08-03T03:22:22.800419Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:22:22.803375Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:22:22.806007Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:22:32.814397Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:22:32.857560Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqv8p4" level=info timestamp=2018-08-03T03:22:32.859290Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:22:32.859449Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [360.437 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should stop a VirtualMachineInstance once [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467 Timed out after 360.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:480 ------------------------------ STEP: getting an VMI STEP: Invoking virtctl stop STEP: Ensuring VMI is running • [SLOW TEST:36.224 seconds] LeaderElection /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43 Start a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53 when the controller pod is not running /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54 should success /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55 ------------------------------ • [SLOW TEST:8.271 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:7.781 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given an vm /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:7.879 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi preset /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:7.733 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi replica set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:55.587 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance 2018/08/02 23:24:59 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/08/02 23:25:51 read closing down: EOF • [SLOW TEST:51.761 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be 
successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/08/02 23:27:52 read closing down: EOF • [SLOW TEST:159.081 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/08/02 23:30:37 read closing down: EOF • [SLOW TEST:170.305 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 03:33:34 http: TLS handshake error from 10.129.0.1:45548: EOF level=info timestamp=2018-08-03T03:33:36.575954Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:33:40.670101Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:33:44 http: TLS handshake error from 10.129.0.1:45556: EOF level=info timestamp=2018-08-03T03:33:46.623435Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:33:54 http: TLS handshake error from 10.129.0.1:45564: EOF level=info timestamp=2018-08-03T03:33:56.672438Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:34:01.408309Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:34:01.408895Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:34:04 http: TLS handshake error from 10.129.0.1:45572: EOF level=info timestamp=2018-08-03T03:34:06.720094Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 
username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:34:10.721657Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:34:11.151320Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:34:14 http: TLS handshake error from 10.129.0.1:45580: EOF level=info timestamp=2018-08-03T03:34:16.760794Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:32:25 http: TLS handshake error from 10.129.0.1:37284: EOF 2018/08/03 03:32:35 http: TLS handshake error from 10.129.0.1:37292: EOF level=info timestamp=2018-08-03T03:32:41.127271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:32:45 http: TLS handshake error from 10.129.0.1:37302: EOF 2018/08/03 03:32:55 http: TLS handshake error from 10.129.0.1:37310: EOF 2018/08/03 03:33:05 http: TLS handshake error from 10.129.0.1:37318: EOF level=info timestamp=2018-08-03T03:33:11.153597Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:33:15 http: TLS handshake error from 10.129.0.1:37326: EOF 2018/08/03 03:33:25 http: TLS handshake error from 10.129.0.1:37334: EOF 2018/08/03 03:33:35 http: TLS handshake error from 10.129.0.1:37342: EOF level=info timestamp=2018-08-03T03:33:41.160371Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:33:45 http: TLS handshake error from 10.129.0.1:37350: EOF 2018/08/03 03:33:55 http: TLS handshake error from 10.129.0.1:37358: EOF 2018/08/03 03:34:05 http: TLS handshake error from 10.129.0.1:37366: EOF 2018/08/03 03:34:15 http: TLS handshake error from 10.129.0.1:37374: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:27:02.734571Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicgg88 kind= uid=16a6672f-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:27:02.735306Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicgg88 kind= uid=16a6672f-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:28:29.890767Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=4a998ed7-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:28:29.890890Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=4a998ed7-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:28:29.943724Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:29:10.364909Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=62b95009-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:29:10.365809Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=62b95009-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:29:10.479419Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:29:10.507917Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:29:46.406320Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=7834f965-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:29:46.406433Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=7834f965-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:29:46.544723Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:31:20.209593Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwprlc kind= uid=b01ddac9-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:31:20.210450Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwprlc kind= uid=b01ddac9-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:31:20.293528Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwprlc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwprlc" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain 
status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:31:19.384352Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmim6mwt, existing: false\n" level=info timestamp=2018-08-03T03:31:19.384423Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:31:19.384444Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:31:19.384519Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:31:19.384557Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Processing deletion." 
level=info timestamp=2018-08-03T03:31:19.384588Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:31:35.196761Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:31:35.197008Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmim6mwt, existing: false\n" level=info timestamp=2018-08-03T03:31:35.197035Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:31:35.197077Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:31:35.197289Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:31:35.197428Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmim6mwt, existing: false\n" level=info timestamp=2018-08-03T03:31:35.197468Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:31:35.197517Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:31:35.197596Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiwprlc-76lg6 Pod phase: Pending level=info timestamp=2018-08-03T03:31:22.571712Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:31:22.573186Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:31:22.575945Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:31:32.587001Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:31:32.605849Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiwprlc" level=info timestamp=2018-08-03T03:31:32.607443Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:31:32.607615Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [180.302 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113 should create a writeable emptyDisk with the right capacity [It] /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115 Timed out after 90.005s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1075 ------------------------------ STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-08-03T03:31:20.592687Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiwprlc kind=VirtualMachineInstance uid=b01ddac9-96cd-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiwprlc-76lg6" Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 03:36:34 http: TLS handshake error from 10.129.0.1:45694: EOF level=info timestamp=2018-08-03T03:36:37.410608Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:36:41.022115Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:36:44 http: TLS handshake error from 10.129.0.1:45702: EOF level=info timestamp=2018-08-03T03:36:47.462533Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:36:54 http: TLS handshake error from 10.129.0.1:45710: EOF level=info timestamp=2018-08-03T03:36:57.512049Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:37:02.099284Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:37:02.148867Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:37:04 http: TLS handshake error from 10.129.0.1:45718: EOF level=info timestamp=2018-08-03T03:37:07.562883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:37:11.121830Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:37:11.192069Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:37:14 http: TLS handshake error from 10.129.0.1:45726: EOF level=info timestamp=2018-08-03T03:37:17.612086Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:35:15 http: 
TLS handshake error from 10.129.0.1:37422: EOF 2018/08/03 03:35:25 http: TLS handshake error from 10.129.0.1:37430: EOF 2018/08/03 03:35:35 http: TLS handshake error from 10.129.0.1:37438: EOF level=info timestamp=2018-08-03T03:35:41.117412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:35:45 http: TLS handshake error from 10.129.0.1:37448: EOF 2018/08/03 03:35:55 http: TLS handshake error from 10.129.0.1:37456: EOF 2018/08/03 03:36:05 http: TLS handshake error from 10.129.0.1:37464: EOF 2018/08/03 03:36:15 http: TLS handshake error from 10.129.0.1:37472: EOF 2018/08/03 03:36:25 http: TLS handshake error from 10.129.0.1:37480: EOF 2018/08/03 03:36:35 http: TLS handshake error from 10.129.0.1:37488: EOF level=info timestamp=2018-08-03T03:36:41.147372Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:36:45 http: TLS handshake error from 10.129.0.1:37496: EOF 2018/08/03 03:36:55 http: TLS handshake error from 10.129.0.1:37504: EOF 2018/08/03 03:37:05 http: TLS handshake error from 10.129.0.1:37512: EOF 2018/08/03 03:37:15 http: TLS handshake error from 10.129.0.1:37520: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:28:29.890767Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=4a998ed7-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:28:29.890890Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=4a998ed7-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:28:29.943724Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:29:10.364909Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=62b95009-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:29:10.365809Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=62b95009-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:29:10.479419Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:29:10.507917Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:29:46.406320Z pos=preset.go:142 component=virt-controller 
service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=7834f965-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:29:46.406433Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6mwt kind= uid=7834f965-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:29:46.544723Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim6mwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim6mwt" level=info timestamp=2018-08-03T03:31:20.209593Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwprlc kind= uid=b01ddac9-96cd-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:31:20.210450Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwprlc kind= uid=b01ddac9-96cd-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:31:20.293528Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwprlc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwprlc" level=info timestamp=2018-08-03T03:34:20.667557Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92pk8 kind= uid=1baddf3f-96ce-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:34:20.667739Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92pk8 kind= uid=1baddf3f-96ce-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." 
level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:31:19.384352Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmim6mwt, existing: false\n" level=info timestamp=2018-08-03T03:31:19.384423Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:31:19.384444Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:31:19.384519Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:31:19.384557Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:31:19.384588Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-03T03:31:35.196761Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:31:35.197008Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmim6mwt, existing: false\n" level=info timestamp=2018-08-03T03:31:35.197035Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:31:35.197077Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:31:35.197289Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:31:35.197428Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmim6mwt, existing: false\n" level=info timestamp=2018-08-03T03:31:35.197468Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:31:35.197517Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:31:35.197596Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmim6mwt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi92pk8-z8tnn Pod phase: Pending level=info timestamp=2018-08-03T03:34:23.697495Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:34:23.698210Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:34:23.700410Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:34:33.707381Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:34:33.753708Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi92pk8" level=info timestamp=2018-08-03T03:34:33.756128Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:34:33.756345Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [180.460 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined and a specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163 should create a writeable emptyDisk with the specified serial number [It] /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165 Timed out after 90.004s. 
Timed out waiting for VMI to enter Running phase
Expected : false to equal : true
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1075
------------------------------
STEP: Starting a VirtualMachineInstance
STEP: Waiting until the VirtualMachineInstance will start
level=info timestamp=2018-08-03T03:34:21.030467Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi92pk8 kind=VirtualMachineInstance uid=1baddf3f-96ce-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi92pk8-z8tnn"
• [SLOW TEST:50.830 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
Starting a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
With ephemeral alpine PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
should be successfully started
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207
------------------------------
2018/08/02 23:38:12 read closing down: EOF
• [SLOW TEST:114.655 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
Starting a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
With ephemeral alpine PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
should not persist data
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218
------------------------------
2018/08/02 23:40:06 read closing down: EOF
2018/08/02 23:40:06 read closing down: EOF
2018/08/02 23:42:59 read closing down: EOF
• [SLOW TEST:173.186 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
Starting a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
With VirtualMachineInstance with two PVCs
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266
should start vmi multiple times
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278
------------------------------
•
Pod name: disks-images-provider-mjqgd Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-n7d9r Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-8d5mf Pod phase: Running
level=info timestamp=2018-08-03T03:45:12.457888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 03:45:14 http: TLS handshake error from 10.129.0.1:46124: EOF
level=info timestamp=2018-08-03T03:45:20.224356Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 03:45:24 http: TLS handshake error from 10.129.0.1:46132: EOF
level=info timestamp=2018-08-03T03:45:30.272304Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 03:45:34 http: TLS handshake error from 10.129.0.1:46140: EOF
level=info timestamp=2018-08-03T03:45:34.807388Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T03:45:34.810367Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200
contentLength=136 level=info timestamp=2018-08-03T03:45:40.326091Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:45:42.510200Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:45:44 http: TLS handshake error from 10.129.0.1:46148: EOF level=info timestamp=2018-08-03T03:45:50.375299Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:45:54 http: TLS handshake error from 10.129.0.1:46156: EOF level=info timestamp=2018-08-03T03:45:55.216622Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:45:55.217651Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:44:05 http: TLS handshake error from 10.129.0.1:37860: EOF level=info timestamp=2018-08-03T03:44:11.156897Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:44:15 http: TLS handshake error from 10.129.0.1:37868: EOF 2018/08/03 03:44:25 http: TLS handshake error from 10.129.0.1:37876: EOF 2018/08/03 03:44:35 http: TLS handshake error from 10.129.0.1:37884: EOF 2018/08/03 03:44:45 http: TLS handshake error from 10.129.0.1:37894: EOF 2018/08/03 03:44:55 http: TLS handshake error from 10.129.0.1:37902: EOF 2018/08/03 03:45:05 http: TLS handshake error from 10.129.0.1:37910: EOF level=info timestamp=2018-08-03T03:45:11.169413Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:45:15 http: TLS handshake error from 10.129.0.1:37918: EOF 2018/08/03 03:45:25 http: TLS handshake error from 10.129.0.1:37926: EOF 2018/08/03 03:45:35 http: TLS handshake error from 10.129.0.1:37934: EOF level=info timestamp=2018-08-03T03:45:41.153215Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:45:45 http: TLS handshake error from 10.129.0.1:37942: EOF 2018/08/03 03:45:55 http: TLS handshake error from 10.129.0.1:37950: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:40:06.792754Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" level=info timestamp=2018-08-03T03:40:06.810585Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" 
level=info timestamp=2018-08-03T03:40:50.177287Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=03d82b0f-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:40:50.178017Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=03d82b0f-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:40:50.328416Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" level=info timestamp=2018-08-03T03:40:50.343061Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" level=info timestamp=2018-08-03T03:41:29.655797Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:41:29.655936Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:41:29.834429Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" level=info timestamp=2018-08-03T03:42:59.673479Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminzn89 kind= uid=5107a7bc-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:42:59.673630Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminzn89 kind= uid=5107a7bc-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:42:59.835037Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminzn89\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminzn89, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5107a7bc-96cf-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminzn89" level=info timestamp=2018-08-03T03:42:59.959567Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58jmt kind= uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:42:59.959925Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58jmt kind= 
uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:43:00.039011Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi58jmt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi58jmt" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:42:22.250645Z pos=vm.go:390 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-08-03T03:42:22.250666Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Processing deletion." level=info timestamp=2018-08-03T03:42:22.250825Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Signaled deletion for testvmivcbcg" level=info timestamp=2018-08-03T03:42:22.255296Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:42:59.374467Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:42:59.374546Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:42:59.374572Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:42:59.374670Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:42:59.374705Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:42:59.374766Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:43:05.196757Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:43:05.196876Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:43:05.196916Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:43:05.196996Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:43:05.197145Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
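(In the failures collected in this run, the virt-launcher pod logs "Marked as ready" yet the pod itself stays in the Pending phase, so the test times out waiting for the VMI to reach Running. A minimal diagnostic sketch for that situation, assuming kubectl access to the test cluster; the namespace and pod name below are taken from the surrounding log and would differ per run:)

kubectl get pods -n kubevirt-test-default -o wide                                  # confirm which virt-launcher pods are stuck in Pending and on which node
kubectl describe pod virt-launcher-testvmi58jmt-fv4x6 -n kubevirt-test-default     # check the Events section for scheduling or resource errors (e.g. kvm/tun device-plugin resources)
kubectl get virtualmachineinstances -n kubevirt-test-default                       # VMI phase as reported by virt-controller
kubectl get events -n kubevirt-test-default --sort-by=.metadata.creationTimestamp  # cluster events around the timeout window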
Pod name: virt-launcher-testvmi58jmt-fv4x6 Pod phase: Pending
level=info timestamp=2018-08-03T03:43:02.382622Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-03T03:43:02.384772Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-03T03:43:02.387876Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-03T03:43:12.398327Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-03T03:43:12.446282Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi58jmt"
level=info timestamp=2018-08-03T03:43:12.449080Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-03T03:43:12.449576Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
------------------------------
• Failure [180.490 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
Creating a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
should start it [It]
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:80
Timed out after 90.004s.
Timed out waiting for VMI to enter Running phase
Expected : false to equal : true
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088
------------------------------
level=info timestamp=2018-08-03T03:43:00.393577Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi58jmt kind=VirtualMachineInstance uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi58jmt-fv4x6"
Pod name: disks-images-provider-mjqgd Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-n7d9r Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-8d5mf Pod phase: Running
level=info timestamp=2018-08-03T03:48:05.359600Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T03:48:11.094191Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T03:48:12.787433Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 03:48:14 http: TLS handshake error from 10.129.0.1:46270: EOF
level=info timestamp=2018-08-03T03:48:21.140286Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 03:48:24 http: TLS handshake error from 10.129.0.1:46278: EOF
level=info timestamp=2018-08-03T03:48:31.183457Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 03:48:34 http: TLS handshake error from 10.129.0.1:46286:
EOF level=info timestamp=2018-08-03T03:48:35.431842Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:48:35.432426Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:48:41.222315Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:48:42.829833Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:48:44 http: TLS handshake error from 10.129.0.1:46294: EOF level=info timestamp=2018-08-03T03:48:51.270365Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:48:54 http: TLS handshake error from 10.129.0.1:46302: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:46:55 http: TLS handshake error from 10.129.0.1:37998: EOF 2018/08/03 03:47:05 http: TLS handshake error from 10.129.0.1:38006: EOF 2018/08/03 03:47:15 http: TLS handshake error from 10.129.0.1:38014: EOF 2018/08/03 03:47:25 http: TLS handshake error from 10.129.0.1:38022: EOF 2018/08/03 03:47:35 http: TLS handshake error from 10.129.0.1:38030: EOF 2018/08/03 03:47:45 http: TLS handshake error from 10.129.0.1:38040: EOF 2018/08/03 03:47:55 http: TLS handshake error from 10.129.0.1:38048: EOF 2018/08/03 03:48:05 http: TLS handshake error from 10.129.0.1:38056: EOF level=info timestamp=2018-08-03T03:48:11.187936Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:48:15 http: TLS handshake error from 10.129.0.1:38064: EOF 2018/08/03 03:48:25 http: TLS handshake error from 10.129.0.1:38072: EOF 2018/08/03 03:48:35 http: TLS handshake error from 10.129.0.1:38080: EOF level=info timestamp=2018-08-03T03:48:41.188921Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:48:45 http: TLS handshake error from 10.129.0.1:38088: EOF 2018/08/03 03:48:55 http: TLS handshake error from 10.129.0.1:38096: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:40:50.177287Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=03d82b0f-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:40:50.178017Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=03d82b0f-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:40:50.328416Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version 
and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" level=info timestamp=2018-08-03T03:40:50.343061Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" level=info timestamp=2018-08-03T03:41:29.655797Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:41:29.655936Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:41:29.834429Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivcbcg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivcbcg" level=info timestamp=2018-08-03T03:42:59.673479Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminzn89 kind= uid=5107a7bc-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:42:59.673630Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminzn89 kind= uid=5107a7bc-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:42:59.835037Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminzn89\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminzn89, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5107a7bc-96cf-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminzn89" level=info timestamp=2018-08-03T03:42:59.959567Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58jmt kind= uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:42:59.959925Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58jmt kind= uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:43:00.039011Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi58jmt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi58jmt" level=info timestamp=2018-08-03T03:46:00.425675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip8kbm kind= uid=bcc465d2-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:46:00.425885Z pos=preset.go:171 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmip8kbm kind= uid=bcc465d2-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:42:22.250645Z pos=vm.go:390 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." 
level=info timestamp=2018-08-03T03:42:22.250666Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Processing deletion." level=info timestamp=2018-08-03T03:42:22.250825Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Signaled deletion for testvmivcbcg" level=info timestamp=2018-08-03T03:42:22.255296Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:42:59.374467Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:42:59.374546Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:42:59.374572Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:42:59.374670Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:42:59.374705Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:42:59.374766Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:43:05.196757Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:43:05.196876Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:43:05.196916Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:43:05.196996Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:43:05.197145Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
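------------------------------
The repeated "Timed out after 90.004s ... Expected false to equal true" failures above are consistent with the wait at tests/utils.go:1088 polling the VMI phase up to a deadline and then asserting the final (still false) result to be true. A minimal, hypothetical Go sketch of that polling pattern, not the actual KubeVirt test helper:

package main

import (
	"fmt"
	"time"
)

// waitFor polls cond every interval until it returns true or timeout expires.
func waitFor(cond func() bool, timeout, interval time.Duration) bool {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		if cond() {
			return true
		}
		time.Sleep(interval)
	}
	return false
}

func main() {
	// Stand-in for "VMI entered the Running phase"; it never becomes true here,
	// mirroring a virt-launcher pod that stays Pending for the whole 90s window.
	running := func() bool { return false }

	if !waitFor(running, 3*time.Second, 500*time.Millisecond) {
		fmt.Println("Timed out waiting for VMI to enter Running phase")
	}
}
------------------------------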
Pod name: virt-launcher-testvmip8kbm-l59fq Pod phase: Pending level=info timestamp=2018-08-03T03:46:03.173739Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:46:03.174990Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:46:03.178046Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:46:13.189482Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:46:13.236099Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmip8kbm" level=info timestamp=2018-08-03T03:46:13.241370Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:46:13.241632Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [180.473 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 should attach virt-launcher to it [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:86 Timed out after 90.004s. Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088 ------------------------------ level=info timestamp=2018-08-03T03:46:00.770479Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmip8kbm kind=VirtualMachineInstance uid=bcc465d2-96cf-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmip8kbm-l59fq" •••• Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running level=info timestamp=2018-08-03T03:51:11.953685Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:51:13.120283Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:51:14 http: TLS handshake error from 10.129.0.1:46416: EOF level=info timestamp=2018-08-03T03:51:21.996495Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:51:24 http: TLS handshake error from 10.129.0.1:46424: EOF level=info timestamp=2018-08-03T03:51:32.042045Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:51:34 http: TLS handshake error from 10.129.0.1:46432: EOF level=info timestamp=2018-08-03T03:51:36.070583Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 
level=info timestamp=2018-08-03T03:51:36.073358Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:51:41.159846Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:51:42.091914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:51:43.160297Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:51:44 http: TLS handshake error from 10.129.0.1:46440: EOF level=info timestamp=2018-08-03T03:51:52.144025Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:51:54 http: TLS handshake error from 10.129.0.1:46448: EOF Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:49:35 http: TLS handshake error from 10.129.0.1:38128: EOF 2018/08/03 03:49:45 http: TLS handshake error from 10.129.0.1:38136: EOF 2018/08/03 03:49:55 http: TLS handshake error from 10.129.0.1:38144: EOF 2018/08/03 03:50:05 http: TLS handshake error from 10.129.0.1:38152: EOF 2018/08/03 03:50:15 http: TLS handshake error from 10.129.0.1:38160: EOF 2018/08/03 03:50:25 http: TLS handshake error from 10.129.0.1:38168: EOF 2018/08/03 03:50:35 http: TLS handshake error from 10.129.0.1:38176: EOF 2018/08/03 03:50:45 http: TLS handshake error from 10.129.0.1:38186: EOF 2018/08/03 03:50:55 http: TLS handshake error from 10.129.0.1:38194: EOF 2018/08/03 03:51:05 http: TLS handshake error from 10.129.0.1:38202: EOF 2018/08/03 03:51:15 http: TLS handshake error from 10.129.0.1:38210: EOF 2018/08/03 03:51:25 http: TLS handshake error from 10.129.0.1:38218: EOF 2018/08/03 03:51:35 http: TLS handshake error from 10.129.0.1:38226: EOF 2018/08/03 03:51:45 http: TLS handshake error from 10.129.0.1:38234: EOF 2018/08/03 03:51:55 http: TLS handshake error from 10.129.0.1:38242: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:42:59.673630Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminzn89 kind= uid=5107a7bc-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:42:59.835037Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminzn89\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminzn89, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5107a7bc-96cf-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminzn89" level=info timestamp=2018-08-03T03:42:59.959567Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58jmt kind= uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info 
timestamp=2018-08-03T03:42:59.959925Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58jmt kind= uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:43:00.039011Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi58jmt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi58jmt" level=info timestamp=2018-08-03T03:46:00.425675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip8kbm kind= uid=bcc465d2-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:46:00.425885Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip8kbm kind= uid=bcc465d2-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.057586Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqwshq kind= uid=286eaa31-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:49:01.058200Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqwshq kind= uid=286eaa31-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.316477Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcbzz kind= uid=28962caa-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:49:01.316646Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcbzz kind= uid=28962caa-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.594577Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqdjjs kind= uid=28c0b822-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:49:01.596005Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqdjjs kind= uid=28c0b822-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.664871Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqdjjs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqdjjs" level=info timestamp=2018-08-03T03:49:01.689465Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqdjjs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqdjjs" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 
port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:42:22.250645Z pos=vm.go:390 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-08-03T03:42:22.250666Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Processing deletion." 
level=info timestamp=2018-08-03T03:42:22.250825Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Signaled deletion for testvmivcbcg" level=info timestamp=2018-08-03T03:42:22.255296Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:42:59.374467Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:42:59.374546Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:42:59.374572Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:42:59.374670Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:42:59.374705Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:42:59.374766Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:43:05.196757Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:43:05.196876Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:43:05.196916Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:43:05.196996Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:43:05.197145Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
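------------------------------
Two recurring virt-controller messages in the dumps above are worth separating from the actual test failures: "the object has been modified; please apply your changes to the latest version and try again" is the standard optimistic-concurrency (409 Conflict) response to an update made with a stale resourceVersion, and "reenqueuing VirtualMachineInstance ..." is the controller putting the key back on its work queue to retry. A hypothetical sketch of that conflict-then-requeue handling (not KubeVirt's vmi.go), using the standard apimachinery error helpers:

package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/runtime/schema"
)

func main() {
	// Simulated API-server response to an update that raced with another writer.
	conflict := apierrors.NewConflict(
		schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"},
		"testvmi58jmt",
		fmt.Errorf("the object has been modified; please apply your changes to the latest version and try again"),
	)

	if apierrors.IsConflict(conflict) {
		// A real controller would call something like queue.AddRateLimited(key) here.
		fmt.Println("reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi58jmt")
	}
}
------------------------------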
Pod name: virt-launcher-testvmiqdjjs-8sdh7 Pod phase: Pending level=info timestamp=2018-08-03T03:49:04.272456Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:49:04.272724Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:49:04.274541Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:49:14.279489Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:49:14.302730Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqdjjs" level=info timestamp=2018-08-03T03:49:14.304129Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:49:14.304319Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" ------------------------------ • Failure [180.466 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Alpine as first boot [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.004s. Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088 ------------------------------ STEP: defining a VirtualMachineInstance with an Alpine disk STEP: adding a Cirros Disk STEP: setting boot order STEP: starting VirtualMachineInstance STEP: Waiting the VirtualMachineInstance start level=info timestamp=2018-08-03T03:49:02.033638Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqdjjs kind=VirtualMachineInstance uid=28c0b822-96d0-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiqdjjs-8sdh7" Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running 2018/08/03 03:54:14 http: TLS handshake error from 10.129.0.1:46562: EOF level=info timestamp=2018-08-03T03:54:22.849437Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:54:24 http: TLS handshake error from 10.129.0.1:46570: EOF level=info timestamp=2018-08-03T03:54:32.894648Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:54:34 http: TLS handshake error from 10.129.0.1:46578: EOF level=info timestamp=2018-08-03T03:54:36.606827Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info 
timestamp=2018-08-03T03:54:36.608683Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:54:41.220594Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:54:42.933052Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:54:43.623414Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:54:44 http: TLS handshake error from 10.129.0.1:46586: EOF level=info timestamp=2018-08-03T03:54:52.965574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:54:54 http: TLS handshake error from 10.129.0.1:46594: EOF level=info timestamp=2018-08-03T03:54:55.033797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:54:55.034815Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:53:05 http: TLS handshake error from 10.129.0.1:38298: EOF level=info timestamp=2018-08-03T03:53:11.201090Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:53:15 http: TLS handshake error from 10.129.0.1:38306: EOF 2018/08/03 03:53:25 http: TLS handshake error from 10.129.0.1:38314: EOF 2018/08/03 03:53:35 http: TLS handshake error from 10.129.0.1:38322: EOF level=info timestamp=2018-08-03T03:53:41.168691Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:53:45 http: TLS handshake error from 10.129.0.1:38332: EOF 2018/08/03 03:53:55 http: TLS handshake error from 10.129.0.1:38340: EOF 2018/08/03 03:54:05 http: TLS handshake error from 10.129.0.1:38348: EOF level=info timestamp=2018-08-03T03:54:11.154476Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:54:15 http: TLS handshake error from 10.129.0.1:38356: EOF 2018/08/03 03:54:25 http: TLS handshake error from 10.129.0.1:38364: EOF 2018/08/03 03:54:35 http: TLS handshake error from 10.129.0.1:38372: EOF 2018/08/03 03:54:45 http: TLS handshake error from 10.129.0.1:38380: EOF 2018/08/03 03:54:55 http: TLS handshake error from 10.129.0.1:38388: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:42:59.959925Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58jmt kind= uid=5132f9c2-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:43:00.039011Z pos=vmi.go:157 
component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi58jmt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi58jmt" level=info timestamp=2018-08-03T03:46:00.425675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip8kbm kind= uid=bcc465d2-96cf-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:46:00.425885Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip8kbm kind= uid=bcc465d2-96cf-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.057586Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqwshq kind= uid=286eaa31-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:49:01.058200Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqwshq kind= uid=286eaa31-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.316477Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcbzz kind= uid=28962caa-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:49:01.316646Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcbzz kind= uid=28962caa-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.594577Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqdjjs kind= uid=28c0b822-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:49:01.596005Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqdjjs kind= uid=28c0b822-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:49:01.664871Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqdjjs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqdjjs" level=info timestamp=2018-08-03T03:49:01.689465Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqdjjs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqdjjs" level=info timestamp=2018-08-03T03:52:01.893729Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqdjjs\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqdjjs, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 28c0b822-96d0-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmiqdjjs" level=info timestamp=2018-08-03T03:52:02.059704Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8p24z kind= uid=945146f4-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:52:02.060172Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8p24z kind= uid=945146f4-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:42:22.250645Z pos=vm.go:390 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-08-03T03:42:22.250666Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Processing deletion." level=info timestamp=2018-08-03T03:42:22.250825Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Signaled deletion for testvmivcbcg" level=info timestamp=2018-08-03T03:42:22.255296Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:42:59.374467Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:42:59.374546Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:42:59.374572Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:42:59.374670Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:42:59.374705Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:42:59.374766Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:43:05.196757Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:43:05.196876Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:43:05.196916Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:43:05.196996Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:43:05.197145Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
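------------------------------
The "should be able to boot from selected disk" failures ("Alpine as first boot" above, "Cirros as first boot" in the next dump) are entries of a single table-driven Ginkgo spec, which is why their failure paths point at vendor/github.com/onsi/ginkgo/extensions/table. A hypothetical, stripped-down sketch of that structure, not the real vmi_lifecycle_test.go:

package bootorder_test

import (
	"testing"

	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/extensions/table"
	. "github.com/onsi/gomega"
)

func TestBootOrder(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "boot order sketch")
}

var _ = Describe("with boot order", func() {
	table.DescribeTable("should be able to boot from selected disk",
		func(firstDisk string) {
			// Stand-in for: define the VMI with two disks, set the boot order,
			// start it, then wait for the Running phase (the step that times
			// out in the report above).
			Expect(firstDisk).ToNot(BeEmpty())
		},
		table.Entry("Alpine as first boot", "alpine"),
		table.Entry("Cirros as first boot", "cirros"),
	)
})
------------------------------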
Pod name: virt-launcher-testvmi8p24z-94khh Pod phase: Pending level=info timestamp=2018-08-03T03:52:04.890870Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-03T03:52:04.892256Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-03T03:52:04.895469Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-03T03:52:14.904071Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-03T03:52:14.960079Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi8p24z" level=info timestamp=2018-08-03T03:52:14.962758Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-03T03:52:14.963014Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [180.470 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Cirros as first boot [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.004s. Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088 ------------------------------ STEP: defining a VirtualMachineInstance with an Alpine disk STEP: adding a Cirros Disk STEP: setting boot order STEP: starting VirtualMachineInstance STEP: Waiting the VirtualMachineInstance start level=info timestamp=2018-08-03T03:52:02.477700Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi8p24z kind=VirtualMachineInstance uid=945146f4-96d0-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi8p24z-94khh" • [SLOW TEST:60.262 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206 should retry starting the VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:207 ------------------------------ •! 
Panic [60.243 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206 should log warning and proceed once the secret is there [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:237 Test Panicked runtime error: invalid memory address or nil pointer dereference /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/panic.go:505 Full Stack Trace /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/panic.go:505 +0x229 kubevirt.io/kubevirt/tests_test.glob..func16.3.9.1.2() /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:258 +0x431 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc420940600, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa testing.tRunner(0xc4208343c0, 0x1432290) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 created by testing.(*T).Run /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0 ------------------------------ STEP: Starting a VirtualMachineInstance STEP: Checking that VirtualMachineInstance start failed level=info timestamp=2018-08-03T03:56:03.126145Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmif4n5d kind=VirtualMachineInstance uid=23cd79c3-96d1-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmif4n5d-s2jkk" Pod name: disks-images-provider-mjqgd Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-n7d9r Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8d5mf Pod phase: Running level=info timestamp=2018-08-03T03:59:40.316316Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:59:40.616620Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:59:42.230572Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:59:42.244667Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:59:42.256886Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:59:44 http: TLS handshake error from 10.129.0.1:46830: EOF level=info timestamp=2018-08-03T03:59:44.154009Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:59:44.400732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 
statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:59:53.068810Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:59:53.087354Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T03:59:53.103961Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 03:59:54 http: TLS handshake error from 10.129.0.1:46838: EOF level=info timestamp=2018-08-03T03:59:54.059307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:59:54.060606Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T03:59:54.448626Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 03:58:05 http: TLS handshake error from 10.129.0.1:38542: EOF 2018/08/03 03:58:15 http: TLS handshake error from 10.129.0.1:38550: EOF 2018/08/03 03:58:25 http: TLS handshake error from 10.129.0.1:38558: EOF 2018/08/03 03:58:35 http: TLS handshake error from 10.129.0.1:38566: EOF level=info timestamp=2018-08-03T03:58:41.139782Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:58:45 http: TLS handshake error from 10.129.0.1:38574: EOF 2018/08/03 03:58:55 http: TLS handshake error from 10.129.0.1:38582: EOF 2018/08/03 03:59:05 http: TLS handshake error from 10.129.0.1:38590: EOF level=info timestamp=2018-08-03T03:59:11.180723Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:59:15 http: TLS handshake error from 10.129.0.1:38598: EOF 2018/08/03 03:59:25 http: TLS handshake error from 10.129.0.1:38606: EOF 2018/08/03 03:59:35 http: TLS handshake error from 10.129.0.1:38614: EOF level=info timestamp=2018-08-03T03:59:41.149104Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 03:59:45 http: TLS handshake error from 10.129.0.1:38624: EOF 2018/08/03 03:59:55 http: TLS handshake error from 10.129.0.1:38632: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:52:01.893729Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqdjjs\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqdjjs, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 28c0b822-96d0-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqdjjs" 
level=info timestamp=2018-08-03T03:52:02.059704Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8p24z kind= uid=945146f4-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:52:02.060172Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8p24z kind= uid=945146f4-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:55:02.355455Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8p24z\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8p24z, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 945146f4-96d0-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8p24z" level=info timestamp=2018-08-03T03:55:02.545910Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig4jr9 kind= uid=ffe4b5cb-96d0-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:55:02.547183Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig4jr9 kind= uid=ffe4b5cb-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:56:02.615286Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmig4jr9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmig4jr9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ffe4b5cb-96d0-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmig4jr9" level=info timestamp=2018-08-03T03:56:02.786438Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4n5d kind= uid=23cd79c3-96d1-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:56:02.788067Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4n5d kind= uid=23cd79c3-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:56:02.862659Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif4n5d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif4n5d" level=info timestamp=2018-08-03T03:57:02.854485Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif4n5d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmif4n5d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 23cd79c3-96d1-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif4n5d" level=info 
timestamp=2018-08-03T03:57:03.032964Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip9pbd kind= uid=47b623fc-96d1-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:57:03.033585Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip9pbd kind= uid=47b623fc-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:57:03.098482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" level=info timestamp=2018-08-03T03:57:03.121933Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:42:22.250645Z pos=vm.go:390 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-08-03T03:42:22.250666Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Processing deletion." level=info timestamp=2018-08-03T03:42:22.250825Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Signaled deletion for testvmivcbcg" level=info timestamp=2018-08-03T03:42:22.255296Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:42:59.374467Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:42:59.374546Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:42:59.374572Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:42:59.374670Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:42:59.374705Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:42:59.374766Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:43:05.196757Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:43:05.196876Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:43:05.196916Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:43:05.196996Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:43:05.197145Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmip9pbd-gs9x9 Pod phase: Pending
level=info timestamp=2018-08-03T03:57:05.519293Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-03T03:57:05.519560Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-03T03:57:05.521039Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-03T03:57:15.528938Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-03T03:57:15.579013Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmip9pbd"
level=info timestamp=2018-08-03T03:57:15.582012Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-03T03:57:15.582296Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.465 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-launcher crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:285
      should be stopped and have Failed phase [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:286

      Timed out after 90.004s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088
------------------------------
level=info timestamp=2018-08-03T03:57:03.420501Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmip9pbd kind=VirtualMachineInstance uid=47b623fc-96d1-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmip9pbd-gs9x9"
Pod name: disks-images-provider-mjqgd Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-n7d9r Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-8d5mf Pod phase: Running
level=info timestamp=2018-08-03T04:02:38.136413Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T04:02:38.139261Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T04:02:42.807090Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T04:02:42.824570Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T04:02:42.840327Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 04:02:44 http: TLS handshake error from 10.129.0.1:46976: EOF
level=info timestamp=2018-08-03T04:02:44.547871Z
pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:02:45.269628Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:02:53.890825Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:02:53.906589Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:02:53.918173Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 04:02:54 http: TLS handshake error from 10.129.0.1:46984: EOF level=info timestamp=2018-08-03T04:02:54.066192Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T04:02:54.067657Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-03T04:02:55.314043Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running 2018/08/03 04:00:55 http: TLS handshake error from 10.129.0.1:38680: EOF 2018/08/03 04:01:05 http: TLS handshake error from 10.129.0.1:38688: EOF 2018/08/03 04:01:15 http: TLS handshake error from 10.129.0.1:38696: EOF 2018/08/03 04:01:25 http: TLS handshake error from 10.129.0.1:38704: EOF 2018/08/03 04:01:35 http: TLS handshake error from 10.129.0.1:38712: EOF 2018/08/03 04:01:45 http: TLS handshake error from 10.129.0.1:38720: EOF 2018/08/03 04:01:55 http: TLS handshake error from 10.129.0.1:38728: EOF 2018/08/03 04:02:05 http: TLS handshake error from 10.129.0.1:38736: EOF level=info timestamp=2018-08-03T04:02:11.177298Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 04:02:15 http: TLS handshake error from 10.129.0.1:38744: EOF 2018/08/03 04:02:25 http: TLS handshake error from 10.129.0.1:38752: EOF 2018/08/03 04:02:35 http: TLS handshake error from 10.129.0.1:38760: EOF level=info timestamp=2018-08-03T04:02:41.178390Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 04:02:45 http: TLS handshake error from 10.129.0.1:38770: EOF 2018/08/03 04:02:55 http: TLS handshake error from 10.129.0.1:38778: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:55:02.547183Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig4jr9 kind= uid=ffe4b5cb-96d0-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-08-03T03:56:02.615286Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmig4jr9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmig4jr9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ffe4b5cb-96d0-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmig4jr9" level=info timestamp=2018-08-03T03:56:02.786438Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4n5d kind= uid=23cd79c3-96d1-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:56:02.788067Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4n5d kind= uid=23cd79c3-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:56:02.862659Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif4n5d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif4n5d" level=info timestamp=2018-08-03T03:57:02.854485Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif4n5d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmif4n5d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 23cd79c3-96d1-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif4n5d" level=info timestamp=2018-08-03T03:57:03.032964Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip9pbd kind= uid=47b623fc-96d1-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:57:03.033585Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip9pbd kind= uid=47b623fc-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:57:03.098482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" level=info timestamp=2018-08-03T03:57:03.121933Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" level=info timestamp=2018-08-03T04:00:03.330689Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmip9pbd, ResourceVersion: 0, AdditionalErrorMsg: 
Precondition failed: UID in precondition: 47b623fc-96d1-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" level=info timestamp=2018-08-03T04:00:03.496835Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic7mck kind= uid=b3470bef-96d1-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T04:00:03.497001Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic7mck kind= uid=b3470bef-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T04:00:03.560137Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic7mck\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic7mck" level=info timestamp=2018-08-03T04:00:03.599792Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic7mck\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic7mck" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:42:22.250645Z pos=vm.go:390 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-08-03T03:42:22.250666Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Processing deletion." level=info timestamp=2018-08-03T03:42:22.250825Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Signaled deletion for testvmivcbcg" level=info timestamp=2018-08-03T03:42:22.255296Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:42:59.374467Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:42:59.374546Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:42:59.374572Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:42:59.374670Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:42:59.374705Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:42:59.374766Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:43:05.196757Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:43:05.196876Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:43:05.196916Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:43:05.196996Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-08-03T03:43:05.197145Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmic7mck-fcg5s Pod phase: Pending
level=info timestamp=2018-08-03T04:00:06.556594Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-03T04:00:06.556866Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-03T04:00:06.558236Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-03T04:00:16.568180Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-03T04:00:16.594773Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmic7mck"
level=info timestamp=2018-08-03T04:00:16.596818Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-03T04:00:16.597035Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.480 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:309
      should recover and continue management [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:310

      Timed out after 90.004s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088
------------------------------
level=info timestamp=2018-08-03T04:00:03.862767Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmic7mck kind=VirtualMachineInstance uid=b3470bef-96d1-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmic7mck-fcg5s"

• [SLOW TEST:36.343 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler is responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:340
      should indicate that a node is ready for vmis
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:341
------------------------------
Pod name: disks-images-provider-mjqgd Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-n7d9r Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-8d5mf Pod phase: Running
level=info timestamp=2018-08-03T04:05:54.472372Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-03T04:05:56.185331Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/03 04:06:04 http: TLS handshake error from 10.129.0.1:47138: EOF
level=info timestamp=2018-08-03T04:06:06.224251Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1
username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:06:08.756447Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:06:08.757405Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 04:06:14 http: TLS handshake error from 10.129.0.1:47146: EOF level=info timestamp=2018-08-03T04:06:15.033751Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:06:16.276553Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 04:06:24 http: TLS handshake error from 10.129.0.1:47154: EOF level=info timestamp=2018-08-03T04:06:26.319238Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/03 04:06:34 http: TLS handshake error from 10.129.0.1:47162: EOF level=info timestamp=2018-08-03T04:06:36.361563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:06:38.845418Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-03T04:06:38.845769Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-qtnl5 Pod phase: Running level=info timestamp=2018-08-03T04:04:41.174997Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 04:04:45 http: TLS handshake error from 10.129.0.1:38866: EOF 2018/08/03 04:04:55 http: TLS handshake error from 10.129.0.1:38874: EOF 2018/08/03 04:05:05 http: TLS handshake error from 10.129.0.1:38882: EOF 2018/08/03 04:05:15 http: TLS handshake error from 10.129.0.1:38890: EOF 2018/08/03 04:05:25 http: TLS handshake error from 10.129.0.1:38898: EOF 2018/08/03 04:05:35 http: TLS handshake error from 10.129.0.1:38906: EOF level=info timestamp=2018-08-03T04:05:41.219342Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 04:05:45 http: TLS handshake error from 10.129.0.1:38916: EOF 2018/08/03 04:05:55 http: TLS handshake error from 10.129.0.1:38924: EOF 2018/08/03 04:06:05 http: TLS handshake error from 10.129.0.1:38932: EOF level=info timestamp=2018-08-03T04:06:11.179270Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/03 04:06:15 http: TLS 
handshake error from 10.129.0.1:38940: EOF 2018/08/03 04:06:25 http: TLS handshake error from 10.129.0.1:38948: EOF 2018/08/03 04:06:35 http: TLS handshake error from 10.129.0.1:38956: EOF Pod name: virt-controller-7d57d96b65-4dwj2 Pod phase: Running level=info timestamp=2018-08-03T03:56:02.788067Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4n5d kind= uid=23cd79c3-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:56:02.862659Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif4n5d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif4n5d" level=info timestamp=2018-08-03T03:57:02.854485Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif4n5d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmif4n5d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 23cd79c3-96d1-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif4n5d" level=info timestamp=2018-08-03T03:57:03.032964Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip9pbd kind= uid=47b623fc-96d1-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T03:57:03.033585Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip9pbd kind= uid=47b623fc-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T03:57:03.098482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" level=info timestamp=2018-08-03T03:57:03.121933Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" level=info timestamp=2018-08-03T04:00:03.330689Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip9pbd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmip9pbd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 47b623fc-96d1-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip9pbd" level=info timestamp=2018-08-03T04:00:03.496835Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic7mck kind= uid=b3470bef-96d1-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T04:00:03.497001Z pos=preset.go:171 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmic7mck kind= uid=b3470bef-96d1-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-03T04:00:03.560137Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic7mck\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic7mck" level=info timestamp=2018-08-03T04:00:03.599792Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic7mck\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic7mck" level=info timestamp=2018-08-03T04:03:03.889587Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic7mck\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmic7mck, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b3470bef-96d1-11e8-bbc9-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic7mck" level=info timestamp=2018-08-03T04:03:40.165638Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4q2h8 kind= uid=346b6c23-96d2-11e8-bbc9-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-03T04:03:40.165822Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4q2h8 kind= uid=346b6c23-96d2-11e8-bbc9-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-t8v76 Pod phase: Running level=info timestamp=2018-08-03T03:22:58.685906Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-97tqs Pod phase: Running level=info timestamp=2018-08-03T03:23:32.294434Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:23:32.294551Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:23:32.294587Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:23:32.295327Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbclt" level=info timestamp=2018-08-03T03:23:32.300830Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-03T03:23:32.301003Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-03T03:23:32.301075Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-03T03:23:32.301121Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.302218Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.302406Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303237Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:23:32.303311Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmilbclt, existing: false\n" level=info timestamp=2018-08-03T03:23:32.303332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-03T03:23:32.303413Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-03T03:23:32.303510Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmilbclt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-hmjbh Pod phase: Running level=info timestamp=2018-08-03T03:42:22.250645Z pos=vm.go:390 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-08-03T03:42:22.250666Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Processing deletion." level=info timestamp=2018-08-03T03:42:22.250825Z pos=vm.go:592 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Signaled deletion for testvmivcbcg" level=info timestamp=2018-08-03T03:42:22.255296Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind= uid=1b5facdd-96cf-11e8-bbc9-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-03T03:42:59.374467Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n" level=info timestamp=2018-08-03T03:42:59.374546Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-03T03:42:59.374572Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-03T03:42:59.374670Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-03T03:42:59.374705Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-03T03:42:59.374766Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-03T03:43:05.196757Z pos=vm.go:753 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-03T03:43:05.196876Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmivcbcg, existing: false\n"
level=info timestamp=2018-08-03T03:43:05.196916Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-03T03:43:05.196996Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-03T03:43:05.197145Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmivcbcg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi4q2h8-fmghs Pod phase: Pending
level=info timestamp=2018-08-03T04:03:42.948515Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-03T04:03:42.950070Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-03T04:03:42.951821Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-03T04:03:52.961578Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-03T04:03:53.002966Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4q2h8"
level=info timestamp=2018-08-03T04:03:53.005160Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-03T04:03:53.005346Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [180.310 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler is not responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:371
      the node controller should react [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:410

      Timed out after 90.003s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088
------------------------------
level=info timestamp=2018-08-03T04:03:40.545810Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi4q2h8 kind=VirtualMachineInstance uid=346b6c23-96d2-11e8-bbc9-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi4q2h8-fmghs"

panic: test timed out after 1h30m0s

goroutine 9609 [running]:
testing.(*M).startAlarm.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc
created by time.goFunc
    /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44

goroutine 1 [chan receive, 90 minutes]:
testing.(*T).Run(0xc4208343c0, 0x139fc95, 0x9, 0x1432290, 0x4801e6)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301
testing.runTests.func1(0xc4208342d0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64
testing.tRunner(0xc4208342d0, 0xc4205b7df8)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
testing.runTests(0xc420aeb020, 0x1d34a50, 0x1, 0x1, 0x412009)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4
testing.(*M).Run(0xc4203a1180, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171
main.main()
    _testmain.go:44 +0x151

goroutine 5 [chan receive]:
kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1d60280)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b
created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203

goroutine 18 [syscall, 90 minutes]:
os/signal.signal_recv(0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6
os/signal.loop()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22
created by os/signal.init.0
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41

goroutine 40 [select]:
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch(0xc4208450d8, 0xc4208c2930)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:285 +0x579
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).WaitFor(0xc420fe50d8, 0x139ba31, 0x6, 0x11e4920, 0x14b27f0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:295 +0xba
kubevirt.io/kubevirt/tests.waitForVMIStart(0x14bbe40, 0xc420f55680, 0x5a, 0x0, 0x0, 0x1d7e901)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1051 +0x50e
kubevirt.io/kubevirt/tests.WaitForSuccessfulVMIStart(0x14bbe40, 0xc420f55680, 0x1d7e938, 0x0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088 +0x43
kubevirt.io/kubevirt/tests_test.glob..func16.3.14.3()
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:503 +0x3ed
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc4209413e0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113 +0x9c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc4209413e0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:64 +0x13e
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*ItNode).Run(0xc420519b20, 0x14b82e0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go:26 +0x7f
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc420837680, 0x0, 0x14b82e0, 0xc420122c80)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:203 +0x648
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc420837680, 0x14b82e0, 0xc420122c80)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:138 +0xff
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc4203883c0, 0xc420837680, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:200 +0x10d
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc4203883c0, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:170 +0x329
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc4203883c0, 0xb)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:66 +0x11b
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc420120910, 0x7fa618fce5d0, 0xc4208343c0, 0x13a2278, 0xb, 0xc420aeb060, 0x2, 0x2, 0x14d4be0, 0xc420122c80, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x14b9340, 0xc4208343c0, 0x13a2278, 0xb, 0xc420aeb040, 0x2, 0x2, 0x2)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:221 +0x258
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x14b9340, 0xc4208343c0, 0x13a2278, 0xb, 0xc420ab3cd0, 0x1, 0x1, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:209 +0xab
kubevirt.io/kubevirt/tests_test.TestTests(0xc4208343c0)
    /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa
testing.tRunner(0xc4208343c0, 0x1432290)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
created by testing.(*T).Run
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0

goroutine 41 [chan receive, 90 minutes]:
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc4203883c0, 0xc4204b20c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:223 +0xd1
created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:60 +0x88

goroutine 42 [select, 90 minutes, locked to thread]:
runtime.gopark(0x1434468, 0x0, 0x139c7b7, 0x6, 0x18, 0x1)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a
runtime.selectgo(0xc420077750, 0xc4204b2180)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50
runtime.ensureSigM.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4
runtime.goexit()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1

goroutine 52 [IO wait]:
internal/poll.runtime_pollWait(0x7fa6190f2f00, 0x72, 0xc420088850)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57
internal/poll.(*pollDesc).wait(0xc420502f18, 0x72, 0xffffffffffffff00, 0x14ba500, 0x1c4b7d0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b
internal/poll.(*pollDesc).waitRead(0xc420502f18, 0xc420674000, 0x8000, 0x8000)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d
internal/poll.(*FD).Read(0xc420502f00, 0xc420674000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d
net.(*netFD).Read(0xc420502f00, 0xc420674000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f
net.(*conn).Read(0xc42011e050, 0xc420674000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a
crypto/tls.(*block).readFromUntil(0xc4200fe450, 0x7fa6190c27a0, 0xc42011e050, 0x5, 0xc42011e050, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96
crypto/tls.(*Conn).readRecord(0xc4200d8000, 0x1434517, 0xc4200d8120, 0x20)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0
crypto/tls.(*Conn).Read(0xc4200d8000, 0xc4202d7000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100
bufio.(*Reader).Read(0xc420554240, 0xc42042c2d8, 0x9, 0x9, 0xc420497528, 0x1, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238
io.ReadAtLeast(0x14b70e0, 0xc420554240, 0xc42042c2d8, 0x9, 0x9, 0x9, 0xc4209c7a20, 0x43f2c1, 0xc420156f00)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86
io.ReadFull(0x14b70e0, 0xc420554240, 0xc42042c2d8, 0x9, 0x9, 0x14345b0, 0xc420088d10, 0x462d33)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc42042c2d8, 0x9, 0x9, 0x14b70e0, 0xc420554240, 0x0, 0xc400000000, 0x7efb60, 0xc420750a28)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc42042c2a0, 0xc4208c2000, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc420088fb0, 0x14331e8, 0xc4209377b0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc4202d2000)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76
created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651

goroutine 9634 [semacquire]:
sync.runtime_notifyListWait(0xc420750a40, 0xc400000001)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sema.go:510 +0x10b
sync.(*Cond).Wait(0xc420750a30)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/sync/cond.go:56 +0x80
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*pipe).Read(0xc420750a28, 0xc420ab4001, 0x5ff, 0x5ff, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/pipe.go:64 +0x8f
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.transportResponseBody.Read(0xc420750a00, 0xc420ab4001, 0x5ff, 0x5ff, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1674 +0xa1
encoding/json.(*Decoder).refill(0xc420a9a3c0, 0x835c0a, 0x9)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:159 +0x132
encoding/json.(*Decoder).readValue(0xc420a9a3c0, 0x0, 0x0, 0x11f6880)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:134 +0x23d
encoding/json.(*Decoder).Decode(0xc420a9a3c0, 0x12127c0, 0xc4204f1ce0, 0x14bf100, 0xc420b00000)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:63 +0x78
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer.(*jsonFrameReader).Read(0xc4208000c0, 0xc420110000, 0x400, 0x400, 0xc4207b2000, 0x40, 0x38)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer/framer.go:150 +0x295
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming.(*decoder).Decode(0xc4208300a0, 0x0, 0x14c0dc0, 0xc4207b2000, 0x380, 0x14bf100, 0xc4200481d8, 0x456ae0, 0xc420048180)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming/streaming.go:77 +0x95
kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch.(*Decoder).Decode(0xc4205ac000, 0xc420e6ffa8, 0x5, 0x14bf100, 0xc420b00000, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch/decoder.go:49 +0x7c
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208000f0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:93 +0x12e
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 4166 [chan send, 38 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420407b00)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 9635 [chan receive]:
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch.func3(0x14c1340, 0xc4208000f0, 0xc4205b22f0, 0xc4200481e0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:276 +0x93
created by kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:274 +0x4ae

goroutine 4022 [chan send, 39 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420406990)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 3734 [chan send, 40 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42040e1e0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 5549 [chan send, 27 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4204f9fb0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 3246 [chan send, 41 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420adac30)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

make: *** [functest] Error 2
+ make cluster-down
./cluster/down.sh