+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/08/07 17:53:58 Waiting for host: 192.168.66.102:22
2018/08/07 17:54:01 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:54:09 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:54:17 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:54:25 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:54:31 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: connection refused. Sleeping 5s
2018/08/07 17:54:36 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8'
+ '[' -n '8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8' ']'
+ docker rm -f 8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8
8d985261fee2
cdd5cb050f63
3b39c685d71c
1fe63ccc7eb5
7fb562e6d373
7357d7573809
4cd9d31e59d6
3dd2b4e034df
b8bd1bfbd6fe
2b04452ad9d2
5a8d8e270d07
5bcee7ba14b9
d317f6c94b21
249a96e5b5be
bf8eede72349
37a4fab28e2d
08ce4e71eea8
e4e0f326f8c6
0a856ce6e727
64a97837a605
4e67e156ec7c
6b2d2d048d59
8b938906ea49
6572244ef7bc
b44328453c0f
f0bde1a8d259
61984dd36d89
d0b161b029c9
6b12e033271c
7cd73847cb28
c98ef990eea8
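Reassembled as a script, the node teardown the trace above just ran on node02 is roughly the following (a sketch: the commands are lifted from the log; the wrapper and the docker-kill branch are assumptions, since that branch was skipped here because no containers were running). One aside: SIGSTOP in the trap list above cannot actually be caught by a shell; the EXIT trap alone is what guarantees make cluster-down runs when the CI job ends.

    #!/bin/bash
    # Reset one cluster node before re-provisioning (sketch reconstructed
    # from the trace above).
    set -e
    # Stop the OpenShift node service and wipe all origin/etcd state.
    systemctl stop origin-node.service
    rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
    # Kill anything still running (assumed branch; skipped in this run),
    # then force-remove every container, including exited ones.
    containers=$(docker ps -q)
    if [ -n "$containers" ]; then docker kill $containers; fi
    containers=$(docker ps -q -a)
    if [ -n "$containers" ]; then docker rm -f $containers; fi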
2018/08/07 17:54:39 Waiting for host: 192.168.66.101:22
2018/08/07 17:54:42 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:54:50 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:54:58 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:55:06 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/07 17:55:14 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused. Sleeping 5s
2018/08/07 17:55:19 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=2.27 ms

--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 2.274/2.274/2.274/0.000 ms
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
Found node02. Adding it to the inventory.
+ echo 'Found node02. Adding it to the inventory.'
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable

--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms

+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
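The inventory-building loop traced above reads back as plain bash: probe node02..node100 by ping, add each reachable node to the [new_nodes] group, and stop at the first dead IP. A sketch reconstructed from the trace (the '>>' redirection targets are assumptions; the log only shows the echo commands, not where their output goes):

    inventory_file=/root/inventory
    echo '[new_nodes]' >> "$inventory_file"
    sed -i '/\[OSEv3:children\]/a new_nodes' "$inventory_file"
    nodes_found=false
    for i in $(seq 2 100); do
        node=$(printf node%02d "$i")               # node02, node03, ...
        node_ip=$(printf 192.168.66.1%02d "$i")    # 192.168.66.102, ...
        set +e
        ping "$node_ip" -c 1
        rc=$?
        set -e
        if [ "$rc" -ne 0 ]; then
            break                                  # first unreachable IP ends the scan
        fi
        nodes_found=true
        echo "$node_ip $node" >> /etc/hosts        # target file assumed
        echo "Found $node. Adding it to the inventory."
        echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> "$inventory_file"
    done

With node02 found and node03 unreachable, the scan ends and the scaleup playbook runs against the two-node inventory: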
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)

TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_lb_to_config] ************************************************

TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_glusterfs_to_config] *****************************************

TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

PLAY [Ensure there are new_nodes] **********************************************

TASK [fail] ********************************************************************
skipping: [localhost]

TASK [fail] ********************************************************************
skipping: [localhost]

PLAY [Initialization Checkpoint Start] *****************************************

TASK [Set install initialization 'In Progress'] ********************************
ok: [node01]

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)

TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_lb_to_config] ************************************************

TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_glusterfs_to_config] *****************************************

TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

[WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config

PLAY [Ensure that all non-node hosts are accessible] ***************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

PLAY [Initialize basic host facts] *********************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02

TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] ***
ok: [node02]
ok: [node01]

TASK [openshift_sanitize_inventory : debug] ************************************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : set_stats] ********************************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] ***
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02

TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node02]
ok: [node01]

TASK [openshift_sanitize_inventory : set_fact] *********************************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node02]
ok: [node01]

TASK [openshift_sanitize_inventory : Standardize on latest variable names] *****
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : Normalize openshift_release] **************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02

TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : set_fact] *********************************
skipping: [node01] => (item=openshift_hosted_etcd_storage_kind)
skipping: [node02] => (item=openshift_hosted_etcd_storage_kind)

TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : At least one master is schedulable] *******
skipping: [node01]
skipping: [node02]

TASK [Detecting Operating System from ostree_booted] ***************************
ok: [node01]
ok: [node02]

TASK [set openshift_deployment_type if unset] **********************************
skipping: [node01]
skipping: [node02]

TASK [check for node already bootstrapped] *************************************
ok: [node01]
ok: [node02]

TASK [initialize_facts set fact openshift_is_bootstrapped] *********************
ok: [node01]
ok: [node02]

TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] ***
ok: [node01]
ok: [node02]

TASK [Determine Atomic Host Docker Version] ************************************
skipping: [node01]
skipping: [node02]

TASK [assert atomic host docker version is 1.12 or later] **********************
skipping: [node01]
skipping: [node02]

PLAY [Retrieve existing master configs and validate] ***************************

TASK [openshift_control_plane : stat] ******************************************
ok: [node01]

TASK [openshift_control_plane : slurp] *****************************************
ok: [node01]

TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]

TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] ***
ok: [node01]

TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Initialize special first-master variables] *******************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Disable web console if required] *****************************************

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Setup yum repositories for all hosts] ************************************

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]

TASK [rhel_subscribe : Install Red Hat Subscription manager] *******************
skipping: [node02]

TASK [rhel_subscribe : Is host already registered?] ****************************
skipping: [node02]

TASK [rhel_subscribe : Register host] ******************************************
skipping: [node02]

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]

TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] ***********
skipping: [node02]

TASK [rhel_subscribe : Attach to OpenShift Pool] *******************************
skipping: [node02]

TASK [rhel_subscribe : Satellite preparation] **********************************
skipping: [node02]

TASK [openshift_repos : openshift_repos detect ostree] *************************
ok: [node02]

TASK [openshift_repos : Ensure libselinux-python is installed] *****************
ok: [node02]

TASK [openshift_repos : Remove openshift_additional.repo file] *****************
ok: [node02]

TASK [openshift_repos : Create any additional repos that are defined] **********

TASK [openshift_repos : include_tasks] *****************************************
skipping: [node02]

TASK [openshift_repos : include_tasks] *****************************************
included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02

TASK [openshift_repos : Configure origin gpg keys] *****************************
ok: [node02]

TASK [openshift_repos : Configure correct origin release repository] ***********
ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2)

TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] ***
changed: [node02] => {
    "msg": "First run of openshift_repos"
}

TASK [openshift_repos : Record that openshift_repos already ran] ***************
ok: [node02]

RUNNING HANDLER [openshift_repos : refresh cache] ******************************
changed: [node02]

PLAY [Install packages necessary for installer] ********************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [Determine if chrony is installed] ****************************************
[WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message.
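The warning above is Ansible asking for the yum/dnf module (or warn=False) instead of a bare rpm command. On the host, the check and the two tasks that follow amount to roughly this (a sketch; the role's actual logic lives in openshift-ansible, not in this shell form):

    # If chrony is already installed, skip the ntp package and just enable
    # chronyd; otherwise install and enable ntpd. Mirrors the three tasks
    # surrounding the warning.
    if rpm -q chrony >/dev/null 2>&1; then
        systemctl enable --now chronyd    # "Start and enable ntpd/chronyd" -> changed
    else
        yum install -y ntp
        systemctl enable --now ntpd
    fi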
changed: [node02]

TASK [Install ntp package] *****************************************************
skipping: [node02]

TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]

TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=python-docker)

PLAY [Initialize cluster facts] ************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]

TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]

TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node01]
ok: [node02]

TASK [Gather Cluster facts] ****************************************************
changed: [node02]
changed: [node01]

TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]

TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]

PLAY [Initialize etcd host variables] ******************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [include_role : openshift_version] ****************************************

TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]

TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0"
}

TASK [openshift_version : set_fact] ********************************************
ok: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}

TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]
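The two assert tasks above only check string prefixes: openshift_release ("3.10") must be contained in both openshift_image_tag ("v3.10.0-rc.0") and openshift_pkg_version ("-3.10.0*"). An equivalent shell check, with this run's values hard-coded (a sketch, not the role's actual implementation):

    # Prefix-consistency check equivalent to the two Ansible asserts above.
    openshift_release="3.10"
    openshift_image_tag="v3.10.0-rc.0"
    openshift_pkg_version="-3.10.0*"

    [[ "$openshift_image_tag" == v${openshift_release}* ]] \
        || { echo "image tag does not match release" >&2; exit 1; }
    [[ "$openshift_pkg_version" == -${openshift_release}* ]] \
        || { echo "pkg version does not match release" >&2; exit 1; }
    echo "All assertions passed"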
PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Verify Requirements] *****************************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [Run variable sanity checks] **********************************************
ok: [node01]

TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]

PLAY [Initialization Checkpoint End] *******************************************

TASK [Set install initialization 'Complete'] ***********************************
ok: [node01]

PLAY [Validate node hostnames] *************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [Query DNS for IP address of node02] **************************************
ok: [node02]

TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]

TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]

PLAY [Configure os_firewall] ***************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]

TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]

TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]

TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]

TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)

TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]

TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]

TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] ***
skipping: [node02]

TASK [os_firewall : Restart polkitd] *******************************************
skipping: [node02]

TASK [os_firewall : Wait for polkit action to have been created] ***************
skipping: [node02]

TASK [os_firewall : Ensure firewalld service is not enabled] *******************
ok: [node02]

TASK [os_firewall : Wait 10 seconds after disabling firewalld] *****************
skipping: [node02]

TASK [os_firewall : Install iptables packages] *********************************
ok: [node02] => (item=iptables)
ok: [node02] => (item=iptables-services)

TASK [os_firewall : Start and enable iptables service] *************************
ok: [node02 -> node02] => (item=node02)

TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] ***
skipping: [node02]

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [container_runtime : Setup the docker-storage for overlay] ****************
skipping: [node02]

TASK [container_runtime : Create file system on extra volume device] ***********

TASK [container_runtime : Create mount entry for extra volume] *****************

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [openshift_excluder : Install docker excluder - yum] **********************
ok: [node02]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]

TASK [openshift_excluder : set_fact] *******************************************
ok: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : Enable openshift excluder] **************************
skipping: [node02]

TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]

TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]

TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]

TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]

TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ******************
skipping: [node02]

TASK [container_runtime : Add https_proxy to /etc/atomic.conf] *****************
skipping: [node02]

TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ********************
skipping: [node02]

TASK [container_runtime : Get current installed Docker version] ****************
ok: [node02]

TASK [container_runtime : Error out if Docker pre-installed but too old] *******
skipping: [node02]

TASK [container_runtime : Error out if requested Docker is too old] ************
skipping: [node02]

TASK [container_runtime : Install Docker] **************************************
skipping: [node02]

TASK [container_runtime : Ensure docker.service.d directory exists] ************
ok: [node02]

TASK [container_runtime : Configure Docker service unit file] ******************
ok: [node02]

TASK [container_runtime : stat] ************************************************
ok: [node02]

TASK [container_runtime : Set registry params] *********************************
skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []})

TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] ***
skipping: [node02]

TASK [container_runtime : Set Proxy Settings] **********************************
skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''})

TASK [container_runtime : Set various Docker options] **************************
ok: [node02]

TASK [container_runtime : stat] ************************************************
ok: [node02]

TASK [container_runtime : Configure Docker Network OPTIONS] ********************
ok: [node02]

TASK [container_runtime : Detect if docker is already started] *****************
ok: [node02]

TASK [container_runtime : Start the Docker service] ****************************
ok: [node02]

TASK [container_runtime : set_fact] ********************************************
ok: [node02]

TASK [container_runtime : Check for docker_storage_path/overlay2] **************
ok: [node02]

TASK [container_runtime : Fixup SELinux permissions for docker] ****************
changed: [node02]

TASK [container_runtime : Ensure /var/lib/containers exists] *******************
ok: [node02]

TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
ok: [node02]

TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]

TASK [container_runtime : stat the docker data dir] ****************************
ok: [node02]

TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]

TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]

TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]

TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]

TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]

TASK [container_runtime : Setup the link] **************************************
skipping: [node02]

TASK [container_runtime : start docker] ****************************************
skipping: [node02]

TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
skipping: [node02]

TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]

TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]

TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]

TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]

TASK [container_runtime : Check that overlay is in the kernel] *****************
skipping: [node02]

TASK [container_runtime : Add overlay to modprobe.d] ***************************
skipping: [node02]

TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
skipping: [node02]

TASK [container_runtime : Enable and start systemd-modules-load] ***************
skipping: [node02]

TASK [container_runtime : Install cri-o] ***************************************
skipping: [node02]

TASK [container_runtime : Remove CRI-O default configuration files] ************
skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf)
skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf)

TASK [container_runtime : Create the CRI-O configuration] **********************
skipping: [node02]

TASK [container_runtime : Ensure CNI configuration directory exists] ***********
skipping: [node02]

TASK [container_runtime : Add iptables allow rules] ****************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})

TASK [container_runtime : Remove iptables rules] *******************************

TASK [container_runtime : Add firewalld allow rules] ***************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})

TASK [container_runtime : Remove firewalld allow rules] ************************

TASK [container_runtime : Configure the CNI network] ***************************
skipping: [node02]

TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
skipping: [node02]

TASK [container_runtime : Start the CRI-O service] *****************************
skipping: [node02]

TASK [container_runtime : Ensure /var/lib/containers exists] *******************
skipping: [node02]

TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
skipping: [node02]

TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]

TASK [container_runtime : stat the docker data dir] ****************************
skipping: [node02]

TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]

TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]

TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]

TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]

TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]

TASK [container_runtime : Setup the link] **************************************
skipping: [node02]

TASK [container_runtime : start docker] ****************************************
skipping: [node02]
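The container_runtime role above mostly no-ops on this node: Docker is pre-installed, already running, and already on the expected storage driver, so the install, migrate-to-/var/lib/containers, and start tasks all skip, and the CRI-O branch skips entirely (this is not a -crio- job). A quick manual equivalent of the checks and the one fix it did apply (a sketch; the role's actual conditionals are more involved):

    # Confirm what the container_runtime tasks verified on node02.
    systemctl is-active docker               # "Detect if docker is already started"
    docker info --format '{{.Driver}}'       # expect: overlay2
    test -d /var/lib/docker && echo "docker data dir present"
    # Approximation of "Fixup SELinux permissions for docker" (-> changed):
    restorecon -R /var/lib/docker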
PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [include_role : openshift_version] ****************************************

TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
skipping: [node01]

TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}

TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Node Preparation Checkpoint Start] ***************************************

TASK [Set Node preparation 'In Progress'] **************************************
ok: [node01]

PLAY [Only target nodes that have not yet been bootstrapped] *******************

TASK [Gathering Facts] *********************************************************
ok: [localhost]

TASK [add_host] ****************************************************************
skipping: [localhost] => (item=node02)
ok: [localhost] => (item=node01)

PLAY [Disable excluders] *******************************************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]

TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_docker_excluder": true
}

TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}

TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]

TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]

TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02

TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]

TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]

TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]

TASK [openshift_excluder : origin-docker-excluder version detected] ************
skipping: [node02]

TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]

TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] ***
skipping: [node02]

TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]

TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]

TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]

TASK [openshift_excluder : origin-excluder version detected] *******************
skipping: [node02]

TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]

TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] ***
skipping: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : disable docker excluder] ****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]

TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]

TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : disable docker excluder] ****************************
skipping: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]
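The enable/disable churn above is the excluder dance: the excluder packages pin yum so docker and origin packages cannot drift between runs, and the role temporarily lifts the pin while it installs. Underneath, each check/disable/enable triple is roughly the following (a sketch; the exclude/unexclude helper scripts are how the excluder RPMs are packaged, recalled from memory rather than shown in this log):

    rpm -q origin-docker-excluder        # "Check for docker-excluder"
    origin-docker-excluder unexclude     # "disable docker excluder" -> changed
    # ... node package installs happen while the yum excludes are lifted ...
    origin-docker-excluder exclude       # "Enable docker excluder" -> changed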
PLAY [Configure nodes] *********************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [openshift_cloud_provider : Set cloud provider facts] *********************
skipping: [node02]

TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
skipping: [node02]

TASK [openshift_cloud_provider : include the defined cloud provider files] *****
skipping: [node02]

TASK [openshift_node : fail] ***************************************************
skipping: [node02]

TASK [openshift_node : Check for NetworkManager service] ***********************
ok: [node02]

TASK [openshift_node : Set fact using_network_manager] *************************
ok: [node02]

TASK [openshift_node : Install dnsmasq] ****************************************
ok: [node02]

TASK [openshift_node : ensure origin/node directory exists] ********************
changed: [node02] => (item=/etc/origin)
changed: [node02] => (item=/etc/origin/node)

TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
skipping: [node02]

TASK [openshift_node : Install network manager dispatch script] ****************
skipping: [node02]

TASK [openshift_node : Install dnsmasq configuration] **************************
ok: [node02]

TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
skipping: [node02]

TASK [openshift_node : Enable dnsmasq] *****************************************
ok: [node02]

TASK [openshift_node : Install network manager dispatch script] ****************
ok: [node02]

TASK [openshift_node : Add iptables allow rules] *******************************
ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})

TASK [openshift_node : Remove iptables rules] **********************************

TASK [openshift_node : Add firewalld allow rules] ******************************
skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})

TASK [openshift_node : Remove firewalld allow rules] ***************************
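Since firewalld is disabled on these nodes, the allow list above lands in plain iptables. The ports it opens correspond to roughly these rules (a sketch of ad-hoc equivalents; the OS_FIREWALL_ALLOW chain is the convention openshift-ansible manages these rules in, and the role applies them idempotently rather than appending blindly):

    # Ports opened by "Add iptables allow rules" on node02.
    iptables -A OS_FIREWALL_ALLOW -p tcp --dport 10250 -j ACCEPT   # Kubernetes kubelet
    iptables -A OS_FIREWALL_ALLOW -p tcp --dport 10256 -j ACCEPT   # kube-proxy health check
    iptables -A OS_FIREWALL_ALLOW -p tcp --dport 80    -j ACCEPT   # http
    iptables -A OS_FIREWALL_ALLOW -p tcp --dport 443   -j ACCEPT   # https
    iptables -A OS_FIREWALL_ALLOW -p udp --dport 4789  -j ACCEPT   # OpenShift OVS SDN (VXLAN)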
TASK [openshift_node : Checking for journald.conf] *****************************
ok: [node02]

TASK [openshift_node : Create journald persistence directories] ****************
ok: [node02]

TASK [openshift_node : Update journald setup] **********************************
ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'})
ok: [node02] => (item={u'var': u'Compress', u'val': True})
ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000})
ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
ok: [node02] => (item={u'var': u'ForwardToSyslog', u'val': False})
ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False})

TASK [openshift_node : Restart journald] ***************************************
skipping: [node02]

TASK [openshift_node : Disable swap] *******************************************
ok: [node02]

TASK [openshift_node : Install node, clients, and conntrack packages] **********
ok: [node02] => (item={u'name': u'origin-node-3.10.0*'})
ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'})
ok: [node02] => (item={u'name': u'conntrack-tools'})

TASK [openshift_node : Restart cri-o] ******************************************
skipping: [node02]

TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
changed: [node02]

TASK [openshift_node : sysctl] *************************************************
ok: [node02]

TASK [openshift_node : Check for credentials file for registry auth] ***********
skipping: [node02]

TASK [openshift_node : Create credentials for registry auth] *******************
skipping: [node02]

TASK [openshift_node : Create credentials for registry auth (alternative)] *****
skipping: [node02]

TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
skipping: [node02]

TASK [openshift_node : Check that node image is present] ***********************
changed: [node02]

TASK [openshift_node : Pre-pull node image] ************************************
skipping: [node02]

TASK [openshift_node : Copy node script to the node] ***************************
ok: [node02]

TASK [openshift_node : Install Node service file] ******************************
ok: [node02]

TASK [openshift_node : Ensure old system path is set] **************************
skipping: [node02] => (item=/etc/origin/openvswitch)
skipping: [node02] => (item=/var/lib/kubelet)
skipping: [node02] => (item=/opt/cni/bin)

TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]

TASK [openshift_node : Copy node container image to ostree storage] ************
skipping: [node02]

TASK [openshift_node : Install or Update node system container] ****************
skipping: [node02]

TASK [openshift_node : Restart network manager to ensure networking configuration is in place] ***
skipping: [node02]

TASK [openshift_node : Configure Node settings] ********************************
ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='})
ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'})
ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'})

TASK [openshift_node : Configure Proxy Settings] *******************************
skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='})
skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='})
skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'})

TASK [openshift_node : file] ***************************************************
skipping: [node02]

TASK [openshift_node : Create the Node config] *********************************
changed: [node02]

TASK [openshift_node : Configure Node Environment Variables] *******************

TASK [openshift_node : Ensure the node static pod directory exists] ************
changed: [node02]

TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]

TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Install NFS storage plugin dependencies] ****************
ok: [node02]

TASK [openshift_node : Check for existence of nfs sebooleans] ******************
ok: [node02] => (item=virt_use_nfs)
ok: [node02] => (item=virt_sandbox_use_nfs)

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:24.548051', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.016933', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:24.531118', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:26.175037', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.016086', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:26.158951', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:24.548051', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.016933', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:24.531118', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:26.175037', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.016086', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:26.158951', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Install GlusterFS storage plugin dependencies] **********
ok: [node02]

TASK [openshift_node : Check for existence of fusefs sebooleans] ***************
ok: [node02] => (item=virt_use_fusefs)
ok: [node02] => (item=virt_sandbox_use_fusefs)

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:34.400973', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017754', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:34.383219', '_ansible_ignore_errors': None, 'failed': False})
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:35.983123', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.017519', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:35.965604', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:34.400973', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017754', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:34.383219', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-07 18:04:35.983123', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.017519', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-07 18:04:35.965604', '_ansible_ignore_errors': None, 'failed': False})
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9317749, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9317749}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use the 
recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02 -> node01] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node02] changed: [node01] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump 
the bootstrap hostnames] ********************************************
ok: [node01] => { "msg": [ "node02", "node01" ] }
TASK [Approve bootstrap nodes] *************************************************
changed: [node01]
TASK [Get CSRs] ****************************************************************
skipping: [node01]
TASK [Report approval errors] **************************************************
skipping: [node01]
PLAY [Ensure any inventory labels are applied to the nodes] ********************
TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [openshift_manage_node : Wait for master API to become available before proceeding] ***
skipping: [node02]
TASK [openshift_manage_node : Wait for Node Registration] **********************
ok: [node02 -> node01]
ok: [node01 -> node01]
TASK [openshift_manage_node : include_tasks] ***********************************
included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01
TASK [openshift_manage_node : Set node schedulability] *************************
ok: [node02 -> node01]
ok: [node01 -> node01]
TASK [openshift_manage_node : include_tasks] ***********************************
included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01
TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] ***
ok: [node02 -> node01]
TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] ***
TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] ***
ok: [node02 -> node01]
TASK [openshift_manage_node : label non-master non-infra nodes compute] ********
TASK [openshift_manage_node : Label all-in-one master as a compute node] *******
skipping: [node02]
PLAY RECAP *********************************************************************
localhost : ok=30 changed=0 unreachable=0 failed=0
node01 : ok=71 changed=3 unreachable=0 failed=0
node02 : ok=155 changed=33 unreachable=0 failed=0
INSTALLER STATUS ***************************************************************
Initialization : Complete (0:03:40)
Node Preparation : Complete (0:06:06)
+ set +e
+ crio=false
+ grep crio /root/inventory
+ '[' 1 -eq 0 ']'
+ set -e
+ cat
+ ansible-playbook -i /root/inventory post_deployment_configuration --extra-vars=crio=false
PLAY [nodes, new_nodes] ********************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [replace] *****************************************************************
skipping: [node01]
skipping: [node02]
TASK [replace] *****************************************************************
skipping: [node01]
skipping: [node02]
TASK [service] *****************************************************************
skipping: [node01]
skipping: [node02]
PLAY RECAP *********************************************************************
node01 : ok=1 changed=0 unreachable=0 failed=0
node02 : ok=1 changed=0 unreachable=0 failed=0
+ set -x
+ /usr/bin/oc get nodes --no-headers
node01 Ready compute,infra,master 5d v1.10.0+b81c8f8
node02 Ready compute 46s v1.10.0+b81c8f8
+ os_rc=0
+ retry_counter=0
+ [[ 0 -lt 20 ]]
+ [[ 0 -ne 0 ]]
+ /usr/bin/oc create -f /tmp/local-volume.yaml
storageclass.storage.k8s.io "local" created
configmap "local-storage-config" created
clusterrolebinding.rbac.authorization.k8s.io
"local-storage-provisioner-pv-binding" created clusterrole.rbac.authorization.k8s.io "local-storage-provisioner-node-clusterrole" created clusterrolebinding.rbac.authorization.k8s.io "local-storage-provisioner-node-binding" created role.rbac.authorization.k8s.io "local-storage-provisioner-jobs-role" created rolebinding.rbac.authorization.k8s.io "local-storage-provisioner-jobs-rolebinding" created serviceaccount "local-storage-admin" created daemonset.extensions "local-volume-provisioner" created Sending file modes: C0755 110489328 oc Sending file modes: C0600 5645 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 5d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ grep NotReady ++ cluster/kubectl.sh get nodes --no-headers + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 5d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... Untagged: localhost:33031/kubevirt/virt-controller:devel Untagged: localhost:33031/kubevirt/virt-controller@sha256:ef945348398e38218e26dcda4ae5aac5f51da9d495ceabc3bddefb9133f4b517 Deleted: sha256:07640f3b3354ea3378998a221272638c2a9d0bd506ed0e4292438145ac3f7a91 Deleted: sha256:86f9e4dad972fbfe5022476b5fd72a64e6c3125eb58bd58997c0448c0e1dc4b5 Deleted: sha256:dacde7398976116226d590b24561635e30a3eb0534a86b634f9f2e23151f0887 Deleted: sha256:d98569ea99f808493bca6785bb47809a2719310959951dc618fdc0b953c19593 Untagged: localhost:33031/kubevirt/virt-launcher:devel Untagged: localhost:33031/kubevirt/virt-launcher@sha256:63b6d4a310897edca6ad209eb15e4e3702ec44ba4f67b43c0421f24478f13834 Deleted: sha256:a307719a3cfdb9ba82f8cf0b8e0e06b804a800c58494c057030bd7f0f19438c2 Deleted: sha256:ee4dab140c05c7443611e5cfeba00acdbcbcadd5f1defcab39e6417b1390b568 Deleted: sha256:6b622b2aa24df8802e541f9bf290fc40ec50b27a2b79728332b6aa0555eeec64 Deleted: sha256:6b0e1c4f0a571a3279e73daf94d7279c9d239eb19569292b5ca965628e7893dc Deleted: sha256:f426e724a1418c38cc23bf3dda547946b36bbb02dee8e4605096921e9018b3b6 Deleted: sha256:e6138cc825beb9420898ca7ce65d34b73cba0afea734b48e1565892bf2ff48c5 Deleted: sha256:5bdc4d86caba7201dfa06e4ab6f0d5c0c325dafa97d5e32d5eb77662070cba4f Deleted: sha256:66fc87aa48463781fa238e3fa40e4539f6dec9bffc2d34d6c318a5531c0504f3 Deleted: sha256:6ca669bad20396ac505fc39530c8b2f2f5d8ea7072be9857225b3a36e42425a2 Deleted: sha256:ec4f705daf6a8a1ff845c7fbdbf310c7fcd9f8574336f4dbf3e1a86512f023d9 Deleted: sha256:4d54870ac3dc5d1ead9234470116907ce02015f3bc6fdd8a2ded8af88fd4b4fe Deleted: sha256:45403cb543198db6ea3970709bd06fde05c8049b7ee928596ed4f47c62f8f1f9 Untagged: localhost:33031/kubevirt/virt-handler:devel Untagged: localhost:33031/kubevirt/virt-handler@sha256:4b08d189e4eebad1cd91507973b76406ccf14cd8540682c2990679b06086d34f Deleted: sha256:93b2f96fc6660e07071934e41953fba008553e05e2ecc317ee1acfc4cd4c0965 Deleted: sha256:e4c4f3e9d3771fb8d74bab6d69dd085e8178c48a2765b8ffbc441323df19d315 Deleted: sha256:2ee82db2cd20aa34d699508b284709fffbb3b9691f90cc1f50d2ee51db082273 Deleted: sha256:c17ea321f0fff37ed49a1c88763861fe245fa656c6006d631b2a6576624e30b7 Untagged: localhost:33031/kubevirt/virt-api:devel Untagged: 
localhost:33031/kubevirt/virt-api@sha256:bbf5596def4fd1497e2b39fde8be3dc470c6e589ee084e150896e747b17c1fb9 Deleted: sha256:9d78a094eff3867dbd83ade26c3332c9c289a824a4bddbcfa06bb1019360bf1f Deleted: sha256:06369a83854a201e41a3ea1a0a18d395f7ba3d4c08642b4dc31ec0ada4064f0a Deleted: sha256:bf6d6e679a8d55d5b35940d00e34397b740143589a912171d094a858566f70b1 Deleted: sha256:06f2550cf527e0da431a620579d7794ac25f8b220a5c036b7c7938acbb3edf49 Untagged: localhost:33031/kubevirt/subresource-access-test:devel Untagged: localhost:33031/kubevirt/subresource-access-test@sha256:17393fa676c8e804f6c170c05d12c2ef468977535af25e76df31528e95936225 Deleted: sha256:6295a820018eab4a17c0204f76c40dfdbca2b02769f4e2ced91b12f5a7a3f6a0 Deleted: sha256:ed3e777ef25b08daf04bfa03ef02885083c2635b96725706b96d37323448d08d Deleted: sha256:3b6ffc8be0ae90d3947d1b87b3baa6494e8cfbf2c2df473d143d6ca6a6780126 Deleted: sha256:c8a73cbbba31c0a49835a2e40428d108339043817d09dbd2499ff3a571d645b6 Untagged: localhost:33031/kubevirt/example-hook-sidecar:devel Untagged: localhost:33031/kubevirt/example-hook-sidecar@sha256:0a88281bec3804a0e5ae923542d2f2fb24f42aa41f013aa23be5a4c3d2674974 Deleted: sha256:bc3e90b02e54b14d021aef7cdd44bab795bb1fd281b207d92ea510d5023c048f Deleted: sha256:db4480f676b1772914d07a7cf55ec1cb4e9e159bb716b6c42c197ef624034d16 Deleted: sha256:977cb6e96bc3472579631bd89532acbdf75292de36fe64668afdbfb74a69dc8d Deleted: sha256:b963669fd7546a56a4580013a8880d33d2da4bf92a0d918254fce1079ebd2595 Sending build context to Docker daemon 5.632 kB Step 1/12 : FROM fedora:28 ---> cc510acfcd70 Step 2/12 : ENV LIBVIRT_VERSION 4.2.0 ---> Using cache ---> b1088795aeb6 Step 3/12 : RUN curl --output /etc/yum.repos.d/fedora-virt-preview.repo https://fedorapeople.org/groups/virt/virt-preview/fedora-virt-preview.repo ---> Using cache ---> 88f43b954f9f Step 4/12 : RUN dnf -y install libvirt-devel-${LIBVIRT_VERSION} make git mercurial sudo gcc findutils gradle rsync-daemon rsync qemu-img protobuf-compiler && dnf -y clean all ---> Using cache ---> 06c70b43758a Step 5/12 : ENV GIMME_GO_VERSION 1.10 ---> Using cache ---> e5b3ae738662 Step 6/12 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 3e3d43f49e45 Step 7/12 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 73e8a3aa263a Step 8/12 : ADD rsyncd.conf /etc/rsyncd.conf ---> Using cache ---> bc244b1c712b Step 9/12 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/mattn/goveralls && go get -u github.com/Masterminds/glide && go get golang.org/x/tools/cmd/goimports && git clone https://github.com/mvdan/sh.git $GOPATH/src/mvdan.cc/sh && cd /go/src/mvdan.cc/sh/cmd/shfmt && git checkout v2.5.0 && go get mvdan.cc/sh/cmd/shfmt && go install && go get -u github.com/golang/mock/gomock && go get -u github.com/rmohr/mock/mockgen && go get -u github.com/rmohr/go-swagger-utils/swagger-doc && go get -u github.com/onsi/ginkgo/ginkgo && go get -u -d k8s.io/code-generator/cmd/deepcopy-gen && go get -u -d k8s.io/code-generator/cmd/defaulter-gen && go get -u -d k8s.io/code-generator/cmd/openapi-gen && cd /go/src/k8s.io/code-generator/cmd/deepcopy-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/defaulter-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/openapi-gen && git checkout release-1.9 && go install && go get -u -d github.com/golang/protobuf/protoc-gen-go && cd /go/src/github.com/golang/protobuf/protoc-gen-go 
&& git checkout 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 && go install ---> Using cache ---> 4cd1786b2bc8 Step 10/12 : RUN pip install j2cli ---> Using cache ---> b51a532fa53a Step 11/12 : ADD entrypoint.sh /entrypoint.sh ---> Using cache ---> 3bc0185264f6 Step 12/12 : ENTRYPOINT /entrypoint.sh ---> Using cache ---> dcf2b21fa2ed Successfully built dcf2b21fa2ed go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh Sending build context to Docker daemon 5.632 kB Step 1/12 : FROM fedora:28 ---> cc510acfcd70 Step 2/12 : ENV LIBVIRT_VERSION 4.2.0 ---> Using cache ---> b1088795aeb6 Step 3/12 : RUN curl --output /etc/yum.repos.d/fedora-virt-preview.repo https://fedorapeople.org/groups/virt/virt-preview/fedora-virt-preview.repo ---> Using cache ---> 88f43b954f9f Step 4/12 : RUN dnf -y install libvirt-devel-${LIBVIRT_VERSION} make git mercurial sudo gcc findutils gradle rsync-daemon rsync qemu-img protobuf-compiler && dnf -y clean all ---> Using cache ---> 06c70b43758a Step 5/12 : ENV GIMME_GO_VERSION 1.10 ---> Using cache ---> e5b3ae738662 Step 6/12 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 3e3d43f49e45 Step 7/12 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 73e8a3aa263a Step 8/12 : ADD rsyncd.conf /etc/rsyncd.conf ---> Using cache ---> bc244b1c712b Step 9/12 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/mattn/goveralls && go get -u github.com/Masterminds/glide && go get golang.org/x/tools/cmd/goimports && git clone https://github.com/mvdan/sh.git $GOPATH/src/mvdan.cc/sh && cd /go/src/mvdan.cc/sh/cmd/shfmt && git checkout v2.5.0 && go get mvdan.cc/sh/cmd/shfmt && go install && go get -u github.com/golang/mock/gomock && go get -u github.com/rmohr/mock/mockgen && go get -u github.com/rmohr/go-swagger-utils/swagger-doc && go get -u github.com/onsi/ginkgo/ginkgo && go get -u -d k8s.io/code-generator/cmd/deepcopy-gen && go get -u -d k8s.io/code-generator/cmd/defaulter-gen && go get -u -d k8s.io/code-generator/cmd/openapi-gen && cd /go/src/k8s.io/code-generator/cmd/deepcopy-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/defaulter-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/openapi-gen && git checkout release-1.9 && go install && go get -u -d github.com/golang/protobuf/protoc-gen-go && cd /go/src/github.com/golang/protobuf/protoc-gen-go && git checkout 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 && go install ---> Using cache ---> 4cd1786b2bc8 Step 10/12 : RUN pip install j2cli ---> Using cache ---> b51a532fa53a Step 11/12 : ADD entrypoint.sh /entrypoint.sh ---> Using cache ---> 3bc0185264f6 Step 12/12 : ENTRYPOINT /entrypoint.sh ---> Using cache ---> dcf2b21fa2ed Successfully built dcf2b21fa2ed go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... 
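The 12-step builder image above is built twice in a row, and every step resolves to "Using cache", so the repeat costs almost nothing; the "find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory" message appears to be a harmless first-pass artifact, since the _out/cmd output directory has not been created yet. The essence of this stage is the containerized-build pattern: compile inside the builder container with the source tree mounted in, so the host only needs docker. A minimal sketch of that pattern (the image tag and mount paths here are illustrative assumptions, not the actual hack/dockerized script):

    # Sketch only -- illustrative names, not the real hack/dockerized logic.
    docker build -t kubevirt-builder .          # all 12 steps hit the cache
    docker run --rm \
        -v "$PWD":/go/src/kubevirt.io/kubevirt \
        -w /go/src/kubevirt.io/kubevirt \
        kubevirt-builder ./hack/build-go.sh install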
compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.4 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b00c84523b53 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> b76b8bd8cd39 Step 5/8 : USER 1001 ---> Using cache ---> b6d9ad9ed232 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> Using cache ---> 0bf9563134f5 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Using cache ---> 8f484abc807f Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "virt-controller" '' ---> Running in 9a40cd271c49 ---> 4d0369c860d0 Removing intermediate container 9a40cd271c49 Successfully built 4d0369c860d0 Sending build context to Docker daemon 43.32 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 945996802736 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 672f9ab56316 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> Using cache ---> 2dda6072016f Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> Using cache ---> 78ded6a6f87d Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Using cache ---> 750119981d77 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Using cache ---> e963c3799f73 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> Using cache ---> 31c0a6a1d28e Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Using cache ---> 84d89809aeab Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "virt-launcher" '' ---> Running in 17d037676f67 ---> b4a542ea64ab Removing intermediate container 17d037676f67 Successfully built b4a542ea64ab Sending build context to Docker daemon 41.7 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> Using cache ---> 1576a0d85438 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Using cache ---> 581c52468115 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "virt-handler" '' ---> Running in 00046dd11af9 ---> d809ee82520d Removing intermediate container 00046dd11af9 Successfully built d809ee82520d Sending build context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> ed1ebf600ee1 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 0769dad023e5 Step 5/8 : USER 1001 ---> Using cache ---> 0cb65afb0c2b Step 6/8 : COPY virt-api /usr/bin/virt-api ---> Using cache ---> 7bc9c63349a4 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Using cache ---> c52c0070432f Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "virt-api" '' ---> Running in 965aa126478e ---> 401c210a8d16 Removing intermediate container 965aa126478e Successfully built 401c210a8d16 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> 
Using cache ---> bfe77d5699ed Step 3/7 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 02134835a6aa Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> ec0843818da7 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 754029bb4bd2 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release2" '' ---> Using cache ---> 38aa989fe395 Successfully built 38aa989fe395 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 207487abe7b2 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "vm-killer" '' ---> Using cache ---> 952e5b31d390 Successfully built 952e5b31d390 Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 5734d749eb5c Step 3/7 : ENV container docker ---> Using cache ---> f8775a77966f Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 1a40cf222a61 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 77b545d92fe7 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> dfe20d463305 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "registry-disk-v1alpha" '' ---> Using cache ---> 106666ae1184 Successfully built 106666ae1184 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33761/kubevirt/registry-disk-v1alpha:devel ---> 106666ae1184 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 556f50466884 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> 22b7f41a20c8 Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release2" '' ---> Using cache ---> 4c23a7cee751 Successfully built 4c23a7cee751 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33761/kubevirt/registry-disk-v1alpha:devel ---> 106666ae1184 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 338b65eb8134 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> fa9bcc8a22c9 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release2" '' ---> Using cache ---> d4847c0da79a Successfully built d4847c0da79a Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33761/kubevirt/registry-disk-v1alpha:devel ---> 106666ae1184 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 338b65eb8134 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> adf52d7bb805 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release2" '' ---> Using cache ---> 77d0ce1cffa8 Successfully built 77d0ce1cffa8 Sending build context to Docker daemon 35.59 MB 
Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 985fe391c056 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 3b2cae8ac543 Step 5/8 : USER 1001 ---> Using cache ---> 0c06e5b4a900 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> Using cache ---> df1280571ee0 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Using cache ---> 5e63bb8cdd94 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "subresource-access-test" '' ---> Running in b245978ec0ff ---> 4cdba4355c37 Removing intermediate container b245978ec0ff Successfully built 4cdba4355c37 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/9 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> d3456b1644b1 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0ba81fddbba1 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 5d33abe3f819 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 783826523be1 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 711bc8d15952 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release2" '' "winrmcli" '' ---> Using cache ---> d1d2998465ac Successfully built d1d2998465ac Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> e3238544ad97 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> Using cache ---> 1bdc82fa6ea8 Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Using cache ---> 519ebb52fe40 Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release2" '' ---> Running in 19780c214157 ---> fe4ee26a437b Removing intermediate container 19780c214157 Successfully built fe4ee26a437b hack/build-docker.sh push The push refers to a repository [localhost:33761/kubevirt/virt-controller] bc4a0c364265: Preparing aa89340cf7a8: Preparing 891e1e4ef82a: Preparing aa89340cf7a8: Pushed bc4a0c364265: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:93754d29c10027ff527f4f79d34843372325c570949a688e93d477b25d818003 size: 949 The push refers to a repository [localhost:33761/kubevirt/virt-launcher] 3da3839dbd1f: Preparing 95c7934ce1e6: Preparing 0c2bc01e9ba4: Preparing 0a56c4bb1f31: Preparing 74adc1efaba4: Preparing 633427c64a24: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 5eefb9960a36: Preparing 891e1e4ef82a: Preparing 633427c64a24: Waiting da38cf808aa5: Waiting b83399358a92: Waiting 186d8b3e4fd8: Waiting 891e1e4ef82a: Waiting fa6154170bf5: Waiting 0a56c4bb1f31: Pushed 3da3839dbd1f: Pushed 95c7934ce1e6: Pushed da38cf808aa5: Pushed b83399358a92: Pushed fa6154170bf5: Pushed 186d8b3e4fd8: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 0c2bc01e9ba4: Pushed 633427c64a24: Pushed 74adc1efaba4: Pushed 5eefb9960a36: Pushed devel: digest: sha256:e00edd02da3ee9ddd784e8f1f2b78d2ffd2c43c04ede49ce367141492281d3b2 size: 2828 The push refers to a repository 
[localhost:33761/kubevirt/virt-handler] 8f8080a870ba: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher 8f8080a870ba: Pushed devel: digest: sha256:00aa9c5bfd74f64c26862d19e3071f6b8f699b32d74abdfb2a9ef475e46be80d size: 741 The push refers to a repository [localhost:33761/kubevirt/virt-api] c9930870042e: Preparing 82fc744c99b4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 82fc744c99b4: Pushed c9930870042e: Pushed devel: digest: sha256:bdbe05f4af8ba1c752e6b3d2075d59da232b34861e25b8744643424a3a2a6fce size: 948 The push refers to a repository [localhost:33761/kubevirt/disks-images-provider] 71ad31feb2c5: Preparing 21d4b721776e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 71ad31feb2c5: Pushed 21d4b721776e: Pushed devel: digest: sha256:d13663d5908fe8b2bca0273f88a0dd0fa612b99f728c6963c9199cbadc5cdd5f size: 948 The push refers to a repository [localhost:33761/kubevirt/vm-killer] c4cfadeeaf5f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider c4cfadeeaf5f: Pushed devel: digest: sha256:884cf194cf4c614d56bc03ea24e3c2b567a74f60a07848859607ccab10a30ac3 size: 740 The push refers to a repository [localhost:33761/kubevirt/registry-disk-v1alpha] 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 661cce8d8e52: Pushed 41e0baba3077: Pushed 25edbec0eaea: Pushed devel: digest: sha256:88146f7f968916eded11853064069ece640572830ea6787a3759f1f2fa5f39ee size: 948 The push refers to a repository [localhost:33761/kubevirt/cirros-registry-disk-demo] c85cd4d54432: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 661cce8d8e52: Mounted from kubevirt/registry-disk-v1alpha 41e0baba3077: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha c85cd4d54432: Pushed devel: digest: sha256:a1e44a6b614a9220e171811a7429dcd9e48c73d1e5bd45d1d4946c3a7266c682 size: 1160 The push refers to a repository [localhost:33761/kubevirt/fedora-cloud-registry-disk-demo] e5c5d7b8190e: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/cirros-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/cirros-registry-disk-demo e5c5d7b8190e: Pushed devel: digest: sha256:643a5753ffe149af3e91be435436a6bae03c16e9d8d9cf3a7996db3ece883121 size: 1161 The push refers to a repository [localhost:33761/kubevirt/alpine-registry-disk-demo] 7f43245da1fd: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/fedora-cloud-registry-disk-demo 41e0baba3077: Mounted from kubevirt/fedora-cloud-registry-disk-demo 7f43245da1fd: Pushed devel: digest: sha256:514d1a8c81964f5c1545f94bc55d20ce27be908001d8a91e81fcefbe5da7d100 size: 1160 The push refers to a repository [localhost:33761/kubevirt/subresource-access-test] 481c9fa8a9bf: Preparing 25cb73590a9d: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 25cb73590a9d: Pushed 481c9fa8a9bf: Pushed devel: digest: sha256:605852628affb8f7c63525eaea714969fcdf4b6d744bc77168792004534ad559 size: 948 The push refers to a repository [localhost:33761/kubevirt/winrmcli] f8083e002d0b: Preparing 53c709abc882: Preparing 9ca98a0f492b: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from 
kubevirt/subresource-access-test f8083e002d0b: Pushed 9ca98a0f492b: Pushed 53c709abc882: Pushed devel: digest: sha256:728aafa4fc8b15315a192aeab9f29b7b5da805f6f9971108ebc48192ba25d858 size: 1165 The push refers to a repository [localhost:33761/kubevirt/example-hook-sidecar] 975169ded35e: Preparing 39bae602f753: Preparing 975169ded35e: Pushed 39bae602f753: Pushed devel: digest: sha256:b734199c6b51f82b04caec4dc100a2cf7fb7b5c33229100b86ca12eaba9973a9 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release2 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release2 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-200-g25fa194 ++ KUBEVIRT_VERSION=v0.7.0-200-g25fa194 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:14ffc4a28e24a2510c9b455b56f35f6193a00b71c9150705f6afec41b003fc76 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace image_pull_policy ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli 
cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system +++ image_pull_policy=IfNotPresent ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33761/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace image_pull_policy + echo 'Cleaning up ...' Cleaning up ... + cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export 
KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io
No resources found
+ _kubectl -n default delete ds -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io
No resources found
+ _kubectl -n default delete customresourcedefinitions -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io
No resources found
+ _kubectl -n default delete pods -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io
No resources found
+ _kubectl -n default delete clusterrolebinding -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io
No resources found
+ _kubectl -n default delete rolebinding -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io
No resources found
+ _kubectl -n default delete roles -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io
No resources found
+ _kubectl -n default delete clusterroles -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io
No resources found
+ _kubectl -n default delete serviceaccounts -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io
No resources found
++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io
++ wc -l
++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found
+ '[' 0 -gt 0 ']'
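At this point the default namespace has been scrubbed: every resource kind carrying the kubevirt.io label is deleted, and the check for the legacy offlinevirtualmachines CRD confirms nothing is left from an older deployment. The same pass now repeats for kube-system. Condensed to its core, the teardown idiom in this trace looks roughly like the sketch below (a sketch only; the real clean.sh routes through the _kubectl wrapper and the exact kind list shown above):

    # Label-driven teardown: delete anything KubeVirt ever created,
    # in every namespace it may have touched.
    for ns in default kube-system; do
        for kind in apiservices deployment rs services validatingwebhookconfiguration \
                    secrets pv pvc ds customresourcedefinitions pods \
                    clusterrolebinding rolebinding roles clusterroles serviceaccounts; do
            kubectl -n "$ns" delete "$kind" -l kubevirt.io
        done
    done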
+ for i in '${namespaces[@]}'
+ _kubectl -n kube-system delete apiservices -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete deployment -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete rs -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete services -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete apiservices -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete secrets -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete pv -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete pv -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete pvc -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete ds -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete pods -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete rolebinding -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete roles -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete clusterroles -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io
No resources found
+ _kubectl -n kube-system delete serviceaccounts -l kubevirt.io
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io
No resources found
++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io
++ wc -l
++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io
Error from server (NotFound):
customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release2 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release2 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-200-g25fa194 ++ KUBEVIRT_VERSION=v0.7.0-200-g25fa194 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:14ffc4a28e24a2510c9b455b56f35f6193a00b71c9150705f6afec41b003fc76 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace image_pull_policy ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system +++ image_pull_policy=IfNotPresent ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ 
kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33761/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace image_pull_policy + echo 'Deploying ...' Deploying ... + [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f 
/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-2c85x 0/1 ContainerCreating 0 4s disks-images-provider-78krt 0/1 ContainerCreating 0 4s virt-api-7d79764579-2n99b 0/1 ContainerCreating 0 5s virt-api-7d79764579-2x6zh 0/1 ContainerCreating 0 5s virt-controller-7d57d96b65-kch7q 0/1 ContainerCreating 0 5s virt-controller-7d57d96b65-m6vfj 0/1 ContainerCreating 0 5s virt-handler-x58fz 0/1 ContainerCreating 0 5s virt-handler-xnssb 0/1 ContainerCreating 0 5s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
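The wait that follows comes from a polling loop in the deploy script. Reconstructed from the variables visible in the trace (namespace list, timeout=300, sample=30), the loop is roughly the following sketch; the real script lives in the KubeVirt repo and may differ in detail:

  # Poll until no pod in the namespace reports a phase other than Running,
  # giving up after $timeout seconds (values taken from the trace above).
  namespace=kube-system
  timeout=300
  sample=30
  current_time=0
  while [ -n "$(cluster/kubectl.sh get pods -n "$namespace" --no-headers | grep -v Running)" ]; do
      echo "Waiting for kubevirt pods to enter the Running state ..."
      cluster/kubectl.sh get pods -n "$namespace" --no-headers | grep -v Running
      sleep "$sample"
      current_time=$((current_time + sample))
      if [ "$current_time" -gt "$timeout" ]; then
          echo "Timed out waiting for pods in namespace $namespace" >&2
          exit 1
      fi
  done

A second pass of the same loop then greps for false in the per-container ready column, which is what produces the "Waiting for KubeVirt containers to become ready ..." messages below.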
+ kubectl get pods -n kube-system --no-headers + cluster/kubectl.sh get pods -n kube-system --no-headers + grep -v Running disks-images-provider-2c85x 0/1 ContainerCreating 0 4s disks-images-provider-78krt 0/1 ContainerCreating 0 4s virt-api-7d79764579-2n99b 0/1 ContainerCreating 0 5s virt-api-7d79764579-2x6zh 0/1 ContainerCreating 0 5s virt-controller-7d57d96b65-kch7q 0/1 ContainerCreating 0 5s virt-controller-7d57d96b65-m6vfj 0/1 ContainerCreating 0 5s virt-handler-x58fz 0/1 ContainerCreating 0 5s virt-handler-xnssb 0/1 ContainerCreating 0 5s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n 'false false' ']' + echo 'Waiting for KubeVirt containers to become ready ...' Waiting for KubeVirt containers to become ready ... + kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + grep false false + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n kube-system + cluster/kubectl.sh get pods -n kube-system NAME READY STATUS RESTARTS AGE disks-images-provider-2c85x 1/1 Running 0 1m disks-images-provider-78krt 1/1 Running 0 1m master-api-node01 1/1 Running 1 5d master-controllers-node01 1/1 Running 2 5d master-etcd-node01 1/1 Running 1 5d virt-api-7d79764579-2n99b 1/1 Running 0 1m virt-api-7d79764579-2x6zh 1/1 Running 0 1m virt-controller-7d57d96b65-kch7q 1/1 Running 0 1m virt-controller-7d57d96b65-m6vfj 1/1 Running 0 1m virt-handler-x58fz 1/1 Running 0 1m virt-handler-xnssb 1/1 Running 0 1m + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n default --no-headers ++ cluster/kubectl.sh get pods -n default --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n default + cluster/kubectl.sh get pods -n default NAME READY STATUS RESTARTS AGE docker-registry-1-92ssr 1/1 Running 1 5d local-volume-provisioner-44jn2 1/1 Running 0 19m local-volume-provisioner-s5d76 1/1 Running 0 19m registry-console-1-k87ct 1/1 Running 2 5d router-1-49jwl 1/1 Running 1 5d + kubectl version + cluster/kubectl.sh version oc v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 features: Basic-Auth GSSAPI Kerberos SPNEGO Server https://127.0.0.1:33758 openshift v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 + ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + [[ openshift-3.10-release =~ windows.* ]] + FUNC_TEST_ARGS='--ginkgo.noColor 
--junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + make functest hack/dockerized "hack/build-func-tests.sh" Sending build context to Docker daemon 5.632 kB Step 1/12 : FROM fedora:28 ---> cc510acfcd70 Step 2/12 : ENV LIBVIRT_VERSION 4.2.0 ---> Using cache ---> b1088795aeb6 Step 3/12 : RUN curl --output /etc/yum.repos.d/fedora-virt-preview.repo https://fedorapeople.org/groups/virt/virt-preview/fedora-virt-preview.repo ---> Using cache ---> 88f43b954f9f Step 4/12 : RUN dnf -y install libvirt-devel-${LIBVIRT_VERSION} make git mercurial sudo gcc findutils gradle rsync-daemon rsync qemu-img protobuf-compiler && dnf -y clean all ---> Using cache ---> 06c70b43758a Step 5/12 : ENV GIMME_GO_VERSION 1.10 ---> Using cache ---> e5b3ae738662 Step 6/12 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 3e3d43f49e45 Step 7/12 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 73e8a3aa263a Step 8/12 : ADD rsyncd.conf /etc/rsyncd.conf ---> Using cache ---> bc244b1c712b Step 9/12 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/mattn/goveralls && go get -u github.com/Masterminds/glide && go get golang.org/x/tools/cmd/goimports && git clone https://github.com/mvdan/sh.git $GOPATH/src/mvdan.cc/sh && cd /go/src/mvdan.cc/sh/cmd/shfmt && git checkout v2.5.0 && go get mvdan.cc/sh/cmd/shfmt && go install && go get -u github.com/golang/mock/gomock && go get -u github.com/rmohr/mock/mockgen && go get -u github.com/rmohr/go-swagger-utils/swagger-doc && go get -u github.com/onsi/ginkgo/ginkgo && go get -u -d k8s.io/code-generator/cmd/deepcopy-gen && go get -u -d k8s.io/code-generator/cmd/defaulter-gen && go get -u -d k8s.io/code-generator/cmd/openapi-gen && cd /go/src/k8s.io/code-generator/cmd/deepcopy-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/defaulter-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/openapi-gen && git checkout release-1.9 && go install && go get -u -d github.com/golang/protobuf/protoc-gen-go && cd /go/src/github.com/golang/protobuf/protoc-gen-go && git checkout 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 && go install ---> Using cache ---> 4cd1786b2bc8 Step 10/12 : RUN pip install j2cli ---> Using cache ---> b51a532fa53a Step 11/12 : ADD entrypoint.sh /entrypoint.sh ---> Using cache ---> 3bc0185264f6 Step 12/12 : ENTRYPOINT /entrypoint.sh ---> Using cache ---> dcf2b21fa2ed Successfully built dcf2b21fa2ed go version go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... 
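While the suite compiles inside the builder container, note how it will be run: FUNC_TEST_ARGS above carries the only two flags this lane adds, so the invocation below is roughly equivalent to executing the compiled binary by hand. This is a hypothetical sketch; the supported entry point is make functest, and hack/functests.sh adds further wiring:

  # Run the compiled Ginkgo suite directly with the flags from FUNC_TEST_ARGS.
  export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
  _out/tests/tests.test \
      --ginkgo.noColor \
      --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml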
compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1533666506 Will run 151 of 151 specs Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:29:16 http: TLS handshake error from 10.129.0.1:48956: EOF 2018/08/07 18:29:26 http: TLS handshake error from 10.129.0.1:48962: EOF 2018/08/07 18:29:36 http: TLS handshake error from 10.129.0.1:48970: EOF 2018/08/07 18:29:46 http: TLS handshake error from 10.129.0.1:48976: EOF 2018/08/07 18:29:56 http: TLS handshake error from 10.129.0.1:48982: EOF level=info timestamp=2018-08-07T18:29:57.652493Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:30:06 http: TLS handshake error from 10.129.0.1:48998: EOF 2018/08/07 18:30:16 http: TLS handshake error from 10.129.0.1:49004: EOF 2018/08/07 18:30:26 http: TLS handshake error from 10.129.0.1:49010: EOF 2018/08/07 18:30:36 http: TLS handshake error from 10.129.0.1:49016: EOF 2018/08/07 18:30:46 http: TLS handshake error from 10.129.0.1:49022: EOF 2018/08/07 18:30:56 http: TLS handshake error from 10.129.0.1:49028: EOF 2018/08/07 18:31:06 http: TLS handshake error from 10.129.0.1:49034: EOF 2018/08/07 18:31:16 http: TLS handshake error from 10.129.0.1:49040: EOF 2018/08/07 18:31:26 http: TLS handshake error from 10.129.0.1:49046: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:30:54.169909Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:30:56 http: TLS handshake error from 10.128.0.1:35234: EOF level=info timestamp=2018-08-07T18:30:57.488928Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:30:59.743386Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:31:00.005122Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:31:01.863675Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:31:04.244950Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:31:06 http: TLS handshake error from 10.128.0.1:35282: EOF level=info timestamp=2018-08-07T18:31:14.363796Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:31:16 http: TLS handshake error from 10.128.0.1:35324: EOF level=info timestamp=2018-08-07T18:31:19.261935Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:31:19.266888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:31:24.594255Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:31:26 http: TLS handshake error from 10.128.0.1:35366: EOF level=info timestamp=2018-08-07T18:31:27.322236Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:27:03.267503Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer" level=info timestamp=2018-08-07T18:27:03.267520Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-08-07T18:27:03.267537Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-08-07T18:27:03.267554Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-08-07T18:27:03.267570Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-08-07T18:27:03.267585Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-08-07T18:27:03.267649Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-08-07T18:27:03.268936Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-08-07T18:27:03.269039Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-08-07T18:27:03.269081Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-08-07T18:27:03.269231Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." 
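The virt-controller entries that follow show the normal admission flow for a test VMI (Initializing, then Marking as initialized), plus repeated "reenqueuing" messages: each one is an optimistic-concurrency conflict ("the object has been modified") that the controller resolves by retrying. Occasional conflicts are harmless noise; a VMI that only ever re-enqueues is the one to look at. A hypothetical way to tally them per VMI, using the pod name from this dump:

  # Count re-enqueue events per VMI in the virt-controller log.
  cluster/kubectl.sh -n kube-system logs virt-controller-7d57d96b65-m6vfj \
      | grep -o 'reenqueuing VirtualMachineInstance [^"]*' \
      | sort | uniq -c | sort -rn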
level=info timestamp=2018-08-07T18:28:28.656653Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq kind= uid=adc86199-9a6f-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:28:28.657810Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq kind= uid=adc86199-9a6f-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:28:28.793356Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq" level=info timestamp=2018-08-07T18:28:28.828183Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtn5grd Pod phase: Pending level=info timestamp=2018-08-07T18:28:38.717676Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T18:28:38.718024Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T18:28:38.719947Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T18:28:48.828889Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T18:28:48.890478Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq" level=info timestamp=2018-08-07T18:28:48.893732Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T18:28:48.894184Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [181.044 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with 3 CPU cores /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56 should report 3 cpu cores under guest OS [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62 Timed out after 90.011s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ STEP: Starting a VirtualMachineInstance level=info timestamp=2018-08-07T18:28:29.012394Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq kind=VirtualMachineInstance uid=adc86199-9a6f-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtn5grd" • Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:32:26 http: TLS handshake error from 10.129.0.1:49082: EOF level=info timestamp=2018-08-07T18:32:27.702098Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:32:36 http: TLS handshake error from 10.129.0.1:49090: EOF 2018/08/07 18:32:46 http: TLS handshake error from 10.129.0.1:49096: EOF 2018/08/07 18:32:56 http: TLS handshake error from 10.129.0.1:49102: EOF level=info timestamp=2018-08-07T18:32:57.810659Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:33:06 http: TLS handshake error from 10.129.0.1:49108: EOF 2018/08/07 18:33:16 http: TLS handshake error from 10.129.0.1:49114: EOF 2018/08/07 18:33:26 http: TLS handshake error from 10.129.0.1:49120: EOF 2018/08/07 18:33:36 http: TLS handshake error from 10.129.0.1:49126: EOF 2018/08/07 18:33:46 http: TLS handshake error from 10.129.0.1:49132: EOF 2018/08/07 18:33:56 http: TLS handshake error from 10.129.0.1:49138: EOF 2018/08/07 18:34:06 http: TLS handshake error from 10.129.0.1:49144: EOF 2018/08/07 18:34:16 http: TLS handshake error from 10.129.0.1:49150: EOF 2018/08/07 18:34:26 http: TLS handshake error from 10.129.0.1:49156: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running 2018/08/07 18:33:56 http: TLS handshake error from 10.128.0.1:36026: EOF level=info timestamp=2018-08-07T18:33:57.421880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:33:58.234580Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:34:01.476001Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:34:01.559584Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:34:03.158141Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:34:06 http: TLS handshake error from 10.128.0.1:36072: EOF level=info timestamp=2018-08-07T18:34:08.497566Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:34:16 http: TLS handshake error from 10.128.0.1:36122: EOF level=info timestamp=2018-08-07T18:34:18.703012Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:34:19.279568Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:34:19.283809Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/08/07 18:34:26 http: TLS handshake error from 10.128.0.1:36170: EOF level=info timestamp=2018-08-07T18:34:27.460843Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:34:28.893870Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:27:03.267649Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-08-07T18:27:03.268936Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-08-07T18:27:03.269039Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-08-07T18:27:03.269081Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-08-07T18:27:03.269231Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." 
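As in the first failure, the launcher pod in this dump never leaves Pending, so the VMI cannot reach Running and the 90s wait in the test expires. When reproducing this by hand, the scheduler's reason usually sits in the pod events; a hypothetical inspection, with the launcher pod name copied from the dump below:

  # Show why a launcher pod is stuck in Pending (scheduling, image pull,
  # missing device resources, ...).
  pod=virt-launcher-testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5njtlg
  cluster/kubectl.sh -n kubevirt-test-default describe pod "$pod" | sed -n '/^Events:/,$p'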
level=info timestamp=2018-08-07T18:28:28.656653Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq kind= uid=adc86199-9a6f-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:28:28.657810Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq kind= uid=adc86199-9a6f-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:28:28.793356Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq" level=info timestamp=2018-08-07T18:28:28.828183Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq" level=info timestamp=2018-08-07T18:31:29.499485Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: adc86199-9a6f-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv8lhbpr8dlllwb548249bmpkzmpsn5vpmxdbtd6pspq72684bkwb6dqj6chjmtq" level=info timestamp=2018-08-07T18:31:29.761162Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s kind= uid=19bc24b2-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:31:29.761861Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s kind= uid=19bc24b2-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:31:30.969916Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 19bc24b2-9a70-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s" level=info timestamp=2018-08-07T18:31:31.196304Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6 kind= uid=1a9d11f8-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:31:31.196792Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6 kind= uid=1a9d11f8-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
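The virt-handler startup lines (repeated below for node02) are relevant to the Pending launcher pods in these dumps: the kvm and tun device plugins that virt-handler registers are what allow launcher pods to schedule at all. Whether the kubelet actually advertises them can be checked per node; this sketch assumes the devices.kubevirt.io/kvm resource name that KubeVirt's device plugins register:

  # One line per node with its advertised KVM device count; an empty or
  # zero value means launcher pods requesting the device stay Pending.
  cluster/kubectl.sh get nodes -o jsonpath='{range .items[*]}{.metadata.name}{"\tkvm="}{.status.allocatable.devices\.kubevirt\.io/kvm}{"\n"}{end}'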
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5njtlg Pod phase: Pending level=info timestamp=2018-08-07T18:31:35.905723Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T18:31:35.906010Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T18:31:35.907664Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T18:31:46.896575Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T18:31:46.959683Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6" level=info timestamp=2018-08-07T18:31:46.964409Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T18:31:46.964956Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" ------------------------------ • Failure [180.862 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-2Mi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.012s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ STEP: Starting a VM level=info timestamp=2018-08-07T18:31:31.490545Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6 kind=VirtualMachineInstance uid=1a9d11f8-9a70-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5njtlg" S [SKIPPING] [0.326 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-1Gi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-1Gi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:216 ------------------------------ • Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:35:46 http: TLS handshake error from 10.129.0.1:49206: EOF 2018/08/07 18:35:56 http: TLS handshake error from 10.129.0.1:49212: EOF level=info timestamp=2018-08-07T18:35:57.455301Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:35:57.593080Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:36:06 http: TLS handshake error from 10.129.0.1:49218: EOF 2018/08/07 18:36:16 http: TLS handshake error from 10.129.0.1:49224: EOF 2018/08/07 18:36:26 http: TLS handshake error from 10.129.0.1:49230: EOF level=info timestamp=2018-08-07T18:36:27.789782Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:36:36 http: TLS handshake error from 10.129.0.1:49236: EOF 2018/08/07 18:36:46 http: TLS handshake error from 10.129.0.1:49242: EOF 2018/08/07 18:36:56 http: TLS handshake error from 10.129.0.1:49248: EOF 2018/08/07 18:37:06 http: TLS handshake error from 10.129.0.1:49254: EOF 2018/08/07 18:37:16 http: TLS handshake error from 10.129.0.1:49260: EOF 2018/08/07 18:37:26 http: TLS handshake error from 10.129.0.1:49266: EOF level=info timestamp=2018-08-07T18:37:27.655743Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:36:57.388978Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:37:01.898436Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:37:03.119529Z 
pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:37:03.150530Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:37:04.368536Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:37:06 http: TLS handshake error from 10.128.0.1:36894: EOF level=info timestamp=2018-08-07T18:37:12.083444Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:37:16 http: TLS handshake error from 10.128.0.1:36936: EOF level=info timestamp=2018-08-07T18:37:17.694464Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:37:17.698422Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:37:22.277433Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:37:26 http: TLS handshake error from 10.128.0.1:36978: EOF level=info timestamp=2018-08-07T18:37:32.461087Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:37:33.389878Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:37:33.423832Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:31:29.761162Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s kind= uid=19bc24b2-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:31:29.761861Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s kind= uid=19bc24b2-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:31:30.969916Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io 
\"testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 19bc24b2-9a70-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidjdqvbrkm9wlf8sbhgs4xf6fmvch2h9k69nzh29m8b4zdh5nmqr9t4jlbrkwv7s" level=info timestamp=2018-08-07T18:31:31.196304Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6 kind= uid=1a9d11f8-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:31:31.196792Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6 kind= uid=1a9d11f8-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:34:31.802983Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1a9d11f8-9a70-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6" level=info timestamp=2018-08-07T18:34:32.293658Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h kind= uid=868ed493-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:34:32.294528Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h kind= uid=868ed493-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:34:32.496355Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:32.552300Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:32.596216Z pos=vmi.go:157 component=virt-controller 
service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:33.700777Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l kind= uid=8766d546-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:34:33.701216Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l kind= uid=8766d546-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:34:33.855075Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:34:33.896162Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
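The failures in this run keep tripping the same 90-second wait at tests/utils.go:1101 while the VMI sits outside the Running phase. The shell equivalent of that wait, handy when poking at a stuck VMI by hand, is a plain phase poll. Hypothetical snippet; the VMI name is one from the dumps above, and the full CRD name is used since the manifests applied earlier guarantee it:

  # Mirror the test suite's wait: poll the VMI phase for up to ~90 seconds.
  vmi=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l
  for _ in $(seq 1 18); do
      phase=$(cluster/kubectl.sh -n kubevirt-test-default get virtualmachineinstances.kubevirt.io "$vmi" -o jsonpath='{.status.phase}' 2>/dev/null)
      [ "$phase" = Running ] && break
      sleep 5
  done
  echo "final phase: ${phase:-unknown}"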
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkbfpb4 Pod phase: Pending level=info timestamp=2018-08-07T18:34:37.550944Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T18:34:37.551236Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T18:34:37.559081Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T18:34:48.497882Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T18:34:48.578884Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:34:48.582874Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T18:34:48.583284Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" ------------------------------ • Failure in Spec Setup (BeforeEach) [180.946 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model defined [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:340 should report defined CPU model /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:341 Timed out after 90.014s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ level=info timestamp=2018-08-07T18:34:34.017631Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l kind=VirtualMachineInstance uid=8766d546-9a70-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkbfpb4" Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:38:26 http: TLS handshake error from 10.129.0.1:49302: EOF 2018/08/07 18:38:36 http: TLS handshake error from 10.129.0.1:49310: EOF 2018/08/07 18:38:46 http: TLS handshake error from 10.129.0.1:49316: EOF 2018/08/07 18:38:56 http: TLS handshake error from 10.129.0.1:49322: EOF level=info timestamp=2018-08-07T18:38:57.704465Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:39:06 http: TLS handshake error from 10.129.0.1:49328: EOF 2018/08/07 18:39:16 http: TLS handshake error from 10.129.0.1:49334: EOF 2018/08/07 18:39:26 http: TLS handshake error from 10.129.0.1:49340: EOF level=info timestamp=2018-08-07T18:39:27.706477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:39:36 http: TLS handshake error from 10.129.0.1:49346: EOF 2018/08/07 18:39:46 http: TLS handshake error from 10.129.0.1:49352: EOF 2018/08/07 18:39:56 http: TLS handshake error from 10.129.0.1:49358: EOF 2018/08/07 18:40:06 http: TLS handshake error from 10.129.0.1:49364: EOF 2018/08/07 18:40:16 http: TLS handshake error from 10.129.0.1:49370: EOF 2018/08/07 18:40:26 http: TLS handshake error from 10.129.0.1:49376: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:39:45.011117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:39:46 http: TLS handshake error from 10.128.0.1:37612: EOF level=info timestamp=2018-08-07T18:39:55.233566Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:39:56 http: TLS handshake error from 10.128.0.1:37656: EOF level=info timestamp=2018-08-07T18:39:57.381113Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:40:04.960197Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:40:04.964307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:40:05.356556Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- 
method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:40:05.703418Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:40:06 http: TLS handshake error from 10.128.0.1:37704: EOF level=info timestamp=2018-08-07T18:40:15.528777Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:40:16 http: TLS handshake error from 10.128.0.1:37746: EOF level=info timestamp=2018-08-07T18:40:25.735827Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:40:26 http: TLS handshake error from 10.128.0.1:37788: EOF level=info timestamp=2018-08-07T18:40:27.360116Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:34:31.802983Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1a9d11f8-9a70-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimlxxslncdg76cgh5c7q55p9wmbb86bs2djrw5jxtlsmbstp8sm6wlnmkf7wwdl6" level=info timestamp=2018-08-07T18:34:32.293658Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h kind= uid=868ed493-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:34:32.294528Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h kind= uid=868ed493-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:34:32.496355Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:32.552300Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io 
\"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:32.596216Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:33.700777Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l kind= uid=8766d546-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:34:33.701216Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l kind= uid=8766d546-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:34:33.855075Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:34:33.896162Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:37:34.431117Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 8766d546-9a70-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:37:34.638579Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln kind= uid=f33e92a7-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:37:34.639222Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln kind= uid=f33e92a7-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:37:34.799292Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln" level=info timestamp=2018-08-07T18:37:34.854802Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7gfwbnm
Pod phase: Pending

level=info timestamp=2018-08-07T18:37:38.991933Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T18:37:38.992666Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T18:37:38.996382Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T18:37:49.788902Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T18:37:49.862516Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln"
level=info timestamp=2018-08-07T18:37:49.864867Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T18:37:49.865330Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [180.805 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model equals to passthrough [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:368
    should report exactly the same model as node CPU
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:369
    Timed out after 90.014s.
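The indented failure headers map one-to-one onto the Ginkgo spec tree in vmi_configuration_test.go: each Describe/Context level contributes a description line plus its file:line, and the [BeforeEach] marker shows the setup block timing out before the It body ever runs. An illustrative skeleton of the passthrough case above (the bodies are assumptions, not the actual test code):

    package tests_test

    import (
        . "github.com/onsi/ginkgo"
        . "github.com/onsi/gomega"
    )

    var _ = Describe("Configurations", func() {
        Context("with CPU spec", func() {
            Context("when CPU model equals to passthrough", func() {
                BeforeEach(func() {
                    // The VMI is created and awaited here; this is where the
                    // 90 s "enter Running phase" wait times out.
                })
                It("should report exactly the same model as node CPU", func() {
                    // Never reached when BeforeEach fails.
                    Expect("some-cpu-model").NotTo(BeEmpty())
                })
            })
        })
    })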
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ level=info timestamp=2018-08-07T18:37:34.891967Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln kind=VirtualMachineInstance uid=f33e92a7-9a70-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7gfwbnm" Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running level=info timestamp=2018-08-07T18:41:27.677912Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:41:36 http: TLS handshake error from 10.129.0.1:49420: EOF 2018/08/07 18:41:46 http: TLS handshake error from 10.129.0.1:49426: EOF 2018/08/07 18:41:56 http: TLS handshake error from 10.129.0.1:49432: EOF 2018/08/07 18:42:06 http: TLS handshake error from 10.129.0.1:49438: EOF 2018/08/07 18:42:16 http: TLS handshake error from 10.129.0.1:49444: EOF 2018/08/07 18:42:26 http: TLS handshake error from 10.129.0.1:49450: EOF level=info timestamp=2018-08-07T18:42:27.670264Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:42:36 http: TLS handshake error from 10.129.0.1:49456: EOF 2018/08/07 18:42:46 http: TLS handshake error from 10.129.0.1:49462: EOF 2018/08/07 18:42:56 http: TLS handshake error from 10.129.0.1:49468: EOF level=info timestamp=2018-08-07T18:42:57.744088Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:43:06 http: TLS handshake error from 10.129.0.1:49474: EOF 2018/08/07 18:43:16 http: TLS handshake error from 10.129.0.1:49480: EOF 2018/08/07 18:43:26 http: TLS handshake error from 10.129.0.1:49486: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:42:38.464000Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:42:46 http: TLS handshake error from 10.128.0.1:38404: EOF level=info timestamp=2018-08-07T18:42:48.643828Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:42:56 http: TLS handshake error from 10.128.0.1:38446: EOF level=info timestamp=2018-08-07T18:42:58.826242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:43:06 http: TLS handshake error from 10.128.0.1:38492: EOF level=info timestamp=2018-08-07T18:43:06.830134Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:43:06.841370Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- 
method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:43:07.286227Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:43:09.006493Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:43:16 http: TLS handshake error from 10.128.0.1:38534: EOF level=info timestamp=2018-08-07T18:43:19.194006Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:43:26 http: TLS handshake error from 10.128.0.1:38576: EOF level=info timestamp=2018-08-07T18:43:27.463621Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:43:29.395328Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:34:32.552300Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:32.596216Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi29lqf92zx2q9dcnxk446n9c66mlm6dqp6kfhrmv5vglnlv26hpsn5nqhkkjlb6h" level=info timestamp=2018-08-07T18:34:33.700777Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l kind= uid=8766d546-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:34:33.701216Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l kind= uid=8766d546-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:34:33.855075Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": the object has been modified; please 
apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:34:33.896162Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:37:34.431117Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 8766d546-9a70-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:37:34.638579Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln kind= uid=f33e92a7-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:37:34.639222Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln kind= uid=f33e92a7-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:37:34.799292Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln" level=info timestamp=2018-08-07T18:37:34.854802Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln" level=info timestamp=2018-08-07T18:40:35.529829Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9 kind= uid=5f0ebd13-9a71-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:40:35.530704Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9 kind= uid=5f0ebd13-9a71-11e8-b575-525500d15501 msg="Marking 
VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:40:35.716233Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" level=info timestamp=2018-08-07T18:40:35.773366Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbznd6s6x
Pod phase: Pending

level=info timestamp=2018-08-07T18:40:39.940156Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T18:40:39.940436Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T18:40:39.942074Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T18:40:50.871268Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T18:40:50.947131Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9"
level=info timestamp=2018-08-07T18:40:50.948871Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T18:40:50.949404Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [180.909 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model not defined [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:392
    should report CPU model from libvirt capabilities
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:393
    Timed out after 90.012s.
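The virt-controller entries above alternate between two flavours of the same optimistic-concurrency race: "the object has been modified; please apply your changes to the latest version and try again" means a write went out with stale state and the VMI key is reenqueued, while the StorageError "UID in precondition ... UID in object meta:" entries appear to be the variant where the queued update still names the UID of an object that has since been deleted. A stdlib-only sketch of that reenqueue-and-retry shape, with errConflict standing in for the Kubernetes conflict error:

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // errConflict plays the role of the apiserver's conflict response behind
    // the "object has been modified" log lines above.
    var errConflict = errors.New("the object has been modified")

    // updateWithRetry retries the write, leaving room to re-read fresh state
    // on each attempt, much like the controller reenqueuing the VMI key.
    func updateWithRetry(update func() error, attempts int) error {
        for i := 0; i < attempts; i++ {
            if err := update(); !errors.Is(err, errConflict) {
                return err // nil on success, or a non-conflict error
            }
            time.Sleep(50 * time.Millisecond) // brief backoff before retrying
        }
        return fmt.Errorf("still conflicting after %d attempts", attempts)
    }

    func main() {
        tries := 0
        err := updateWithRetry(func() error {
            tries++
            if tries < 3 {
                return errConflict // early writes race with another writer
            }
            return nil
        }, 5)
        fmt.Println(err, "tries:", tries)
    }

These conflicts are logged at info level and resolve after a retry or two; on their own they do not explain why the VMIs never reach Running.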
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ level=info timestamp=2018-08-07T18:40:35.801998Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9 kind=VirtualMachineInstance uid=5f0ebd13-9a71-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbznd6s6x" Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:44:56 http: TLS handshake error from 10.129.0.1:49542: EOF level=info timestamp=2018-08-07T18:44:57.829947Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:45:06 http: TLS handshake error from 10.129.0.1:49548: EOF 2018/08/07 18:45:16 http: TLS handshake error from 10.129.0.1:49554: EOF 2018/08/07 18:45:26 http: TLS handshake error from 10.129.0.1:49560: EOF level=info timestamp=2018-08-07T18:45:27.821899Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:45:36 http: TLS handshake error from 10.129.0.1:49566: EOF 2018/08/07 18:45:46 http: TLS handshake error from 10.129.0.1:49572: EOF 2018/08/07 18:45:56 http: TLS handshake error from 10.129.0.1:49578: EOF level=info timestamp=2018-08-07T18:45:57.799377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:46:06 http: TLS handshake error from 10.129.0.1:49584: EOF 2018/08/07 18:46:16 http: TLS handshake error from 10.129.0.1:49590: EOF 2018/08/07 18:46:26 http: TLS handshake error from 10.129.0.1:49596: EOF level=info timestamp=2018-08-07T18:46:27.778017Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:46:36 http: TLS handshake error from 10.129.0.1:49602: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running 2018/08/07 18:45:56 http: TLS handshake error from 10.128.0.1:39258: EOF level=info timestamp=2018-08-07T18:45:57.115938Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:45:57.258790Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:46:02.291872Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:46:06 http: TLS handshake error from 10.128.0.1:39312: EOF level=info timestamp=2018-08-07T18:46:08.755182Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:46:08.818452Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" 
proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:46:08.837091Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:46:12.474525Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:46:15.809076Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:46:15.812720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/08/07 18:46:16 http: TLS handshake error from 10.128.0.1:39354: EOF level=info timestamp=2018-08-07T18:46:22.647624Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:46:26 http: TLS handshake error from 10.128.0.1:39396: EOF level=info timestamp=2018-08-07T18:46:32.844424Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:37:34.431117Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 8766d546-9a70-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminksjnj5wlkv5ts8xv9cbqpx66w4rk6tl6sqrkc7fpvjm2htd5jpvkxg24t6v44l" level=info timestamp=2018-08-07T18:37:34.638579Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln kind= uid=f33e92a7-9a70-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:37:34.639222Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln kind= uid=f33e92a7-9a70-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:37:34.799292Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln" level=info timestamp=2018-08-07T18:37:34.854802Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijhqw8zwcjzxqtpn7fh8wgq9m74hxg8mqlzj7g8w4vffrnn9p275d7gnc4h7tbln" level=info timestamp=2018-08-07T18:40:35.529829Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9 kind= uid=5f0ebd13-9a71-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:40:35.530704Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9 kind= uid=5f0ebd13-9a71-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:40:35.716233Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" level=info timestamp=2018-08-07T18:40:35.773366Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" level=info timestamp=2018-08-07T18:43:36.154443Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5f0ebd13-9a71-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" level=info timestamp=2018-08-07T18:43:36.175488Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5f0ebd13-9a71-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" level=info timestamp=2018-08-07T18:43:36.435323Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh kind= uid=cae1d938-9a71-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:43:36.436059Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh kind= uid=cae1d938-9a71-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:43:36.649127Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh" level=info timestamp=2018-08-07T18:43:36.712463Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkhqnlw4
Pod phase: Pending

level=info timestamp=2018-08-07T18:43:41.317454Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T18:43:41.317721Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T18:43:41.319536Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T18:43:53.580887Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T18:43:53.676274Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh"
level=info timestamp=2018-08-07T18:43:53.679040Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T18:43:53.679773Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.860 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  New VirtualMachineInstance with all supported drives
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:413
    should have all the device nodes [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:436
    Timed out after 90.011s.
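Each launcher above logs "Watchdog file created at /var/run/kubevirt/watchdog-files/..." just before marking itself ready; by all appearances this is a heartbeat file the launcher keeps fresh so that virt-handler can treat a stale file as a dead launcher. A rough sketch of that heartbeat-file pattern, using an assumed temp path rather than the real watchdog directory:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
        "time"
    )

    func main() {
        // Assumed demo path; the real files live under
        // /var/run/kubevirt/watchdog-files/ as in the launcher logs above.
        path := filepath.Join(os.TempDir(), "demo-watchdog")
        f, err := os.Create(path)
        if err != nil {
            panic(err)
        }
        f.Close()

        // Launcher side: bump the file's mtime on every heartbeat tick.
        go func() {
            for {
                now := time.Now()
                os.Chtimes(path, now, now)
                time.Sleep(200 * time.Millisecond)
            }
        }()

        // Monitor side: an mtime older than the grace period means the
        // heartbeats stopped and the workload can be declared dead.
        time.Sleep(time.Second)
        info, err := os.Stat(path)
        if err != nil {
            panic(err)
        }
        fmt.Println("watchdog stale:", time.Since(info.ModTime()) > 500*time.Millisecond)
    }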
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ level=info timestamp=2018-08-07T18:43:36.713391Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh kind=VirtualMachineInstance uid=cae1d938-9a71-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkhqnlw4" • [SLOW TEST:21.278 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.730 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given an vm /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.594 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi preset /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.709 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi replica set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:48:46 http: TLS handshake error from 10.129.0.1:49682: EOF 2018/08/07 18:48:56 http: TLS handshake error from 10.129.0.1:49688: EOF 2018/08/07 18:49:06 http: TLS handshake error from 10.129.0.1:49694: EOF 2018/08/07 18:49:16 http: TLS handshake error from 10.129.0.1:49700: EOF 2018/08/07 18:49:26 http: TLS handshake error from 10.129.0.1:49706: EOF level=info timestamp=2018-08-07T18:49:27.751006Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:49:36 http: TLS handshake error from 10.129.0.1:49712: EOF 2018/08/07 18:49:46 http: TLS handshake error from 10.129.0.1:49718: EOF 2018/08/07 18:49:56 http: TLS handshake error from 10.129.0.1:49724: EOF 2018/08/07 
18:50:06 http: TLS handshake error from 10.129.0.1:49730: EOF 2018/08/07 18:50:16 http: TLS handshake error from 10.129.0.1:49736: EOF 2018/08/07 18:50:26 http: TLS handshake error from 10.129.0.1:49742: EOF 2018/08/07 18:50:36 http: TLS handshake error from 10.129.0.1:49748: EOF 2018/08/07 18:50:46 http: TLS handshake error from 10.129.0.1:49756: EOF 2018/08/07 18:50:56 http: TLS handshake error from 10.129.0.1:49762: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:50:29.392463Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:50:29.426615Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:50:36 http: TLS handshake error from 10.128.0.1:40510: EOF level=info timestamp=2018-08-07T18:50:36.695835Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:50:36.731282Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:50:36.769672Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:50:37.786452Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:50:40.875307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:50:41.780351Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:50:41.797711Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:50:46 http: TLS handshake error from 10.128.0.1:40556: EOF level=info timestamp=2018-08-07T18:50:47.983163Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:50:56 http: TLS handshake error from 10.128.0.1:40598: EOF level=info timestamp=2018-08-07T18:50:57.355616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:50:58.157699Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod 
phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:43:36.154443Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5f0ebd13-9a71-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" level=info timestamp=2018-08-07T18:43:36.175488Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5f0ebd13-9a71-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwnwhchq9q6rdtq9s4nvdrkhlwl2snfdzpxbzn8zfdctqm8smbzbdlbq8vl52xm9" level=info timestamp=2018-08-07T18:43:36.435323Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh kind= uid=cae1d938-9a71-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:43:36.436059Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh kind= uid=cae1d938-9a71-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:43:36.649127Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh" level=info timestamp=2018-08-07T18:43:36.712463Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh" level=info timestamp=2018-08-07T18:48:00.429361Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn kind= uid=683d44e7-9a72-11e8-b575-525500d15501 msg="Initializing 
VirtualMachineInstance" level=info timestamp=2018-08-07T18:48:00.430240Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn kind= uid=683d44e7-9a72-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:48:00.577248Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn" level=info timestamp=2018-08-07T18:48:00.627761Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn" level=info timestamp=2018-08-07T18:48:00.718092Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 683d44e7-9a72-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn" level=info timestamp=2018-08-07T18:48:00.901237Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf kind= uid=68877fec-9a72-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:48:00.901668Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf kind= uid=68877fec-9a72-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:48:01.034205Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf" level=info timestamp=2018-08-07T18:48:01.079628Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf"

Pod name: virt-handler-x58fz
Pod phase: Running

level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-handler-xnssb
Pod phase: Running

level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9zl2fd
Pod phase: Pending

level=info timestamp=2018-08-07T18:48:05.065640Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T18:48:05.066102Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T18:48:05.069726Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T18:48:15.728241Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T18:48:15.819949Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf"
level=info timestamp=2018-08-07T18:48:15.822694Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T18:48:15.823162Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
------------------------------
• Failure [180.945 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    should start it [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:80
    Timed out after 90.014s.
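The steady "http: TLS handshake error from 10.129.0.1:...: EOF" lines in both virt-api pods tick roughly every ten seconds from the same source address, which is consistent with a plain-TCP health probe that connects and hangs up before speaking TLS, rather than with failing API requests. A sketch of a probe that would leave exactly this trace on the server side (address hypothetical):

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    // tcpProbe opens a TCP connection and closes it immediately, without a
    // TLS handshake; an HTTPS server logs this as a handshake EOF.
    func tcpProbe(addr string) error {
        conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
        if err != nil {
            return err
        }
        return conn.Close()
    }

    func main() {
        // Hypothetical HTTPS endpoint such as a virt-api service port.
        fmt.Println(tcpProbe("127.0.0.1:8443"))
    }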
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ level=info timestamp=2018-08-07T18:48:01.178921Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf kind=VirtualMachineInstance uid=68877fec-9a72-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9zl2fd" Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:51:46 http: TLS handshake error from 10.129.0.1:49792: EOF 2018/08/07 18:51:56 http: TLS handshake error from 10.129.0.1:49798: EOF 2018/08/07 18:52:06 http: TLS handshake error from 10.129.0.1:49804: EOF 2018/08/07 18:52:16 http: TLS handshake error from 10.129.0.1:49810: EOF 2018/08/07 18:52:26 http: TLS handshake error from 10.129.0.1:49816: EOF 2018/08/07 18:52:36 http: TLS handshake error from 10.129.0.1:49822: EOF 2018/08/07 18:52:46 http: TLS handshake error from 10.129.0.1:49828: EOF 2018/08/07 18:52:56 http: TLS handshake error from 10.129.0.1:49834: EOF level=info timestamp=2018-08-07T18:52:57.814236Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:53:06 http: TLS handshake error from 10.129.0.1:49840: EOF 2018/08/07 18:53:16 http: TLS handshake error from 10.129.0.1:49846: EOF 2018/08/07 18:53:26 http: TLS handshake error from 10.129.0.1:49852: EOF 2018/08/07 18:53:36 http: TLS handshake error from 10.129.0.1:49858: EOF 2018/08/07 18:53:46 http: TLS handshake error from 10.129.0.1:49866: EOF 2018/08/07 18:53:56 http: TLS handshake error from 10.129.0.1:49872: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:53:30.609153Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:53:30.756633Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:53:36 http: TLS handshake error from 10.128.0.1:41304: EOF level=info timestamp=2018-08-07T18:53:37.749351Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:53:37.775852Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:53:37.803477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:53:40.981548Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info 
timestamp=2018-08-07T18:53:42.147177Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:53:43.558649Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:53:43.575884Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:53:46 http: TLS handshake error from 10.128.0.1:41346: EOF level=info timestamp=2018-08-07T18:53:51.225856Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:53:56 http: TLS handshake error from 10.128.0.1:41388: EOF level=info timestamp=2018-08-07T18:53:57.415639Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T18:54:01.435800Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:43:36.435323Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh kind= uid=cae1d938-9a71-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:43:36.436059Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh kind= uid=cae1d938-9a71-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:43:36.649127Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh" level=info timestamp=2018-08-07T18:43:36.712463Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8lp52ps4n5nthhjh52hjgmjsk6d5trq4tzkh5fcvbxgk98xtrx5zhzs6kjpngh" level=info timestamp=2018-08-07T18:48:00.429361Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn kind= uid=683d44e7-9a72-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:48:00.430240Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn kind= uid=683d44e7-9a72-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:48:00.577248Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn" level=info timestamp=2018-08-07T18:48:00.627761Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn" level=info timestamp=2018-08-07T18:48:00.718092Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 683d44e7-9a72-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis75d2k7rzdvbbkskqgcxqrv69sxnpsxnxbpzm8lzbkrtk6f459kfghbkqpmtjqn" level=info timestamp=2018-08-07T18:48:00.901237Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf kind= uid=68877fec-9a72-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:48:00.901668Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf kind= uid=68877fec-9a72-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:48:01.034205Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf" level=info timestamp=2018-08-07T18:48:01.079628Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf\": the object has been modified; 
please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2rtswsdfdvk7hlqdw5xwqdkgmxsq66nkh2xk9dl4kv58vtc9mjvmcjgbhdmmkwf" level=info timestamp=2018-08-07T18:51:01.768578Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf kind= uid=d4555782-9a72-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:51:01.769531Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf kind= uid=d4555782-9a72-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
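------------------------------
The reenqueue entries above ("the object has been modified; please apply your changes to the latest version and try again") are ordinary Kubernetes optimistic-concurrency conflicts: a controller submitted an update built from a stale resourceVersion, the API server answered 409 Conflict, and the key went back on the work queue. Below is a minimal sketch of the standard client-go remedy; it is illustrative only, not KubeVirt's actual controller code, and vmiGetterUpdater/fakeVMI are hypothetical stand-ins for the real client and API types.

```go
package main

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

// vmiGetterUpdater is a hypothetical, minimal stand-in for the real client;
// only the Get-then-Update shape matters for the pattern.
type vmiGetterUpdater interface {
	Get(name string) (*fakeVMI, error) // must return a fresh copy with a current resourceVersion
	Update(vmi *fakeVMI) error         // must return a Conflict API error when the copy is stale
}

// fakeVMI is a hypothetical placeholder for the VirtualMachineInstance type.
type fakeVMI struct {
	ResourceVersion string
	Labels          map[string]string
}

// updateVMIWithRetry re-reads the object and reapplies the mutation on every
// attempt. retry.RetryOnConflict retries only when the callback returns an
// error that the apimachinery conflict check recognizes, backing off per
// retry.DefaultRetry.
func updateVMIWithRetry(c vmiGetterUpdater, name string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		vmi, err := c.Get(name) // fresh copy, fresh resourceVersion
		if err != nil {
			return err
		}
		if vmi.Labels == nil {
			vmi.Labels = map[string]string{}
		}
		vmi.Labels["example"] = "retried" // reapply the desired change
		return c.Update(vmi)              // a conflict here triggers another loop
	})
}

func main() { fmt.Println("sketch only; see updateVMIWithRetry") }
```

A controller that simply requeues the key, as virt-controller does here, achieves the same effect one work-queue cycle at a time, which is why these entries are informational rather than fatal.
------------------------------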
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62xc8h7 Pod phase: Pending level=info timestamp=2018-08-07T18:51:05.891472Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T18:51:05.891757Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T18:51:05.893818Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T18:51:17.383118Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T18:51:17.480103Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf" level=info timestamp=2018-08-07T18:51:17.486480Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T18:51:17.487513Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [180.836 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 should attach virt-launcher to it [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:86 Timed out after 90.011s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ level=info timestamp=2018-08-07T18:51:02.083121Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf kind=VirtualMachineInstance uid=d4555782-9a72-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62xc8h7" •••• Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running level=info timestamp=2018-08-07T18:54:57.735755Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:55:06 http: TLS handshake error from 10.129.0.1:49914: EOF 2018/08/07 18:55:16 http: TLS handshake error from 10.129.0.1:49920: EOF 2018/08/07 18:55:26 http: TLS handshake error from 10.129.0.1:49926: EOF 2018/08/07 18:55:36 http: TLS handshake error from 10.129.0.1:49932: EOF 2018/08/07 18:55:46 http: TLS handshake error from 10.129.0.1:49938: EOF 2018/08/07 18:55:56 http: TLS handshake error from 10.129.0.1:49944: EOF level=info timestamp=2018-08-07T18:55:57.852522Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:56:06 http: TLS handshake error from 10.129.0.1:49950: EOF 2018/08/07 18:56:16 http: TLS handshake error from 10.129.0.1:49956: EOF 2018/08/07 18:56:26 http: TLS handshake error from 10.129.0.1:49962: EOF 2018/08/07 18:56:36 http: TLS handshake error from 10.129.0.1:49968: EOF 2018/08/07 18:56:46 http: TLS handshake error from 10.129.0.1:49976: EOF 2018/08/07 18:56:56 http: TLS handshake error from 10.129.0.1:49982: EOF level=info timestamp=2018-08-07T18:56:57.853191Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:56:31.836491Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:31.898928Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:31.934115Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:34.296997Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:56:36 http: TLS handshake error from 10.128.0.1:42122: EOF level=info timestamp=2018-08-07T18:56:39.147456Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info 
timestamp=2018-08-07T18:56:39.204463Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:39.239062Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:43.427518Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:44.483309Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:45.362548Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:56:45.362524Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:56:46 http: TLS handshake error from 10.128.0.1:42164: EOF level=info timestamp=2018-08-07T18:56:54.669812Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:56:56 http: TLS handshake error from 10.128.0.1:42206: EOF Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:51:01.768578Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf kind= uid=d4555782-9a72-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:51:01.769531Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf kind= uid=d4555782-9a72-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:54:02.427663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d4555782-9a72-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipcgrfxhzlb4px8cqdw5fv7p2zcfs5nr7svf62fwthvz77ggtm78x9gkk89p7lbf" level=info timestamp=2018-08-07T18:54:03.222362Z pos=preset.go:142 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t kind= uid=407cef7f-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:54:03.223023Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t kind= uid=407cef7f-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:54:03.395536Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t" level=info timestamp=2018-08-07T18:54:03.452684Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t" level=info timestamp=2018-08-07T18:54:03.527669Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 407cef7f-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t" level=info timestamp=2018-08-07T18:54:03.710332Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486 kind= uid=40c84459-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:54:03.710679Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486 kind= uid=40c84459-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:54:03.842324Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486" level=info timestamp=2018-08-07T18:54:04.015245Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486\": 
StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 40c84459-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486" level=info timestamp=2018-08-07T18:54:04.219999Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq kind= uid=41152d31-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:54:04.220328Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq kind= uid=41152d31-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:54:04.479075Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
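------------------------------
The StorageError entries above (Code: 4, "Precondition failed: UID in precondition: ..., UID in object meta: ") report a write that was pinned to a specific object UID after that object had already been deleted, which is why the UID in the object meta is empty: the controller is racing a deletion and reenqueues until its cache catches up. As a hedged illustration of where such a precondition comes from, the sketch below builds a UID-pinned delete with plain apimachinery types; it is not the code path that produced these lines, and deleteOptionsForUID is an invented helper name.

```go
package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// deleteOptionsForUID pins a delete to one specific incarnation of an object.
// If the object is already gone, or was recreated under a new UID, the API
// server fails the request with exactly the kind of precondition error that
// appears in the log above.
func deleteOptionsForUID(uid string) *metav1.DeleteOptions {
	return &metav1.DeleteOptions{
		// NewUIDPreconditions wraps the UID in a metav1.Preconditions struct.
		Preconditions: metav1.NewUIDPreconditions(uid),
	}
}

func main() {
	// UID copied from the log entries above, purely as an example value.
	opts := deleteOptionsForUID("683d44e7-9a72-11e8-b575-525500d15501")
	fmt.Println(*opts.Preconditions.UID)
}
```
------------------------------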
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2pwd2sr Pod phase: Pending level=info timestamp=2018-08-07T18:54:09.059203Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T18:54:09.059651Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T18:54:09.061319Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T18:54:19.563280Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T18:54:19.597317Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq" level=info timestamp=2018-08-07T18:54:19.599142Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T18:54:19.599638Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" ------------------------------ • Failure [180.830 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Alpine as first boot [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.013s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ STEP: defining a VirtualMachineInstance with an Alpine disk STEP: adding a Cirros Disk STEP: setting boot order STEP: starting VirtualMachineInstance STEP: Waiting the VirtualMachineInstance start level=info timestamp=2018-08-07T18:54:04.476354Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq kind=VirtualMachineInstance uid=41152d31-9a73-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2pwd2sr" Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 18:57:56 http: TLS handshake error from 10.129.0.1:50018: EOF 2018/08/07 18:58:06 http: TLS handshake error from 10.129.0.1:50024: EOF 2018/08/07 18:58:16 http: TLS handshake error from 10.129.0.1:50030: EOF 2018/08/07 18:58:26 http: TLS handshake error from 10.129.0.1:50036: EOF 2018/08/07 18:58:36 http: TLS handshake error from 10.129.0.1:50042: EOF 2018/08/07 18:58:46 http: TLS handshake error from 10.129.0.1:50048: EOF 2018/08/07 18:58:56 http: TLS handshake error from 10.129.0.1:50054: EOF level=info timestamp=2018-08-07T18:58:57.782781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 18:59:06 http: TLS handshake error from 10.129.0.1:50060: EOF 2018/08/07 18:59:16 http: TLS handshake error from 10.129.0.1:50066: EOF 2018/08/07 18:59:26 http: TLS handshake error from 10.129.0.1:50072: EOF 2018/08/07 18:59:36 http: TLS handshake error from 10.129.0.1:50078: EOF 2018/08/07 18:59:46 http: TLS handshake error from 10.129.0.1:50086: EOF 2018/08/07 18:59:56 http: TLS handshake error from 10.129.0.1:50092: EOF level=info timestamp=2018-08-07T18:59:57.796797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T18:59:33.022527Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:33.102177Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:33.139183Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:59:36 http: TLS handshake error from 10.128.0.1:42914: EOF level=info timestamp=2018-08-07T18:59:37.856654Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:40.275883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:40.302093Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:40.326659Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:44.687963Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:59:46 http: TLS handshake error from 10.128.0.1:42962: EOF level=info timestamp=2018-08-07T18:59:47.231114Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:47.233227Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T18:59:48.021623Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 18:59:56 http: TLS handshake error from 10.128.0.1:43010: EOF level=info timestamp=2018-08-07T18:59:58.223342Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T18:54:03.222362Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t kind= uid=407cef7f-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:54:03.223023Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t kind= uid=407cef7f-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:54:03.395536Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t" level=info timestamp=2018-08-07T18:54:03.452684Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t\": 
the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t" level=info timestamp=2018-08-07T18:54:03.527669Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 407cef7f-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibxd4rnwh7c2r6zljdmv4wkqzjv789mjd5jxwvcqjjjvmxl6kgc42s67rv94pp8t" level=info timestamp=2018-08-07T18:54:03.710332Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486 kind= uid=40c84459-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:54:03.710679Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486 kind= uid=40c84459-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:54:03.842324Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486" level=info timestamp=2018-08-07T18:54:04.015245Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 40c84459-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihgqsvmvtkq5g76gp895b6jmrrxw26n7h2wmsk2dp9fbc7mv4j8dknl78p7tv486" level=info timestamp=2018-08-07T18:54:04.219999Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq kind= uid=41152d31-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:54:04.220328Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq kind= uid=41152d31-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T18:54:04.479075Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq" level=info timestamp=2018-08-07T18:57:04.820725Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 41152d31-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq" level=info timestamp=2018-08-07T18:57:05.000216Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r kind= uid=acd5530b-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T18:57:05.000967Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r kind= uid=acd5530b-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
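------------------------------
Every Failure in this run has the same shape: virt-launcher connects to libvirt and logs "Marked as ready", but the pod never leaves Pending, so the 90-second poll at tests/utils.go:1101 gives up before the VMI reaches the Running phase; the flattened "Expected : false to equal : true" is Gomega's way of saying a boolean condition never became true. A sketch of that wait pattern follows, assuming Gomega's Eventually; waitForRunning and getPhase are hypothetical stand-ins, not the actual utils.go helper.

```go
package vmitests

import (
	"time"

	. "github.com/onsi/gomega"
)

// waitForRunning sketches the 90-second poll that is timing out in the
// failures above. getPhase is a hypothetical callback that reads the VMI's
// current status.phase from the cluster.
func waitForRunning(getPhase func() string) {
	Eventually(func() bool {
		// The assertion in the log has this shape: a bool that never
		// flips because the VMI stays in Pending/Scheduling.
		return getPhase() == "Running"
	}, 90*time.Second, 1*time.Second).Should(BeTrue(), "Timed out waiting for VMI to enter Running phase")
}
```
------------------------------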
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
Pod name: virt-launcher-testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5cxv2b5 Pod phase: Pending
level=info timestamp=2018-08-07T18:57:09.174260Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T18:57:09.174779Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T18:57:09.176603Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T18:57:20.240139Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T18:57:20.323735Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r"
level=info timestamp=2018-08-07T18:57:20.326382Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T18:57:20.326919Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.837 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with boot order
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174
      should be able to boot from selected disk
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        Cirros as first boot [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Timed out after 90.012s.
        Timed out waiting for VMI to enter Running phase
        Expected
          : false
        to equal
          : true
        /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: defining a VirtualMachineInstance with an Alpine disk
STEP: adding a Cirros Disk
STEP: setting boot order
STEP: starting VirtualMachineInstance
STEP: Waiting the VirtualMachineInstance start
level=info timestamp=2018-08-07T18:57:05.277854Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r kind=VirtualMachineInstance uid=acd5530b-9a73-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5cxv2b5"
• [SLOW TEST:60.393 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with user-data
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205
      without k8s secret
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206
        should retry starting the VirtualMachineInstance
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:207
------------------------------
•! Panic [60.430 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with user-data
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205
      without k8s secret
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206
        should log warning and proceed once the secret is there [It]
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:237

        Test Panicked
        runtime error: invalid memory address or nil pointer dereference
        /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/panic.go:505

        Full Stack Trace
        /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/panic.go:505 +0x229
        kubevirt.io/kubevirt/tests_test.glob..func16.3.9.1.2()
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:258 +0x431
        kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc420856060, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
        /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa
        testing.tRunner(0xc4205705a0, 0x1432278)
        /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
        created by testing.(*T).Run
        /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0
------------------------------
STEP: Starting a VirtualMachineInstance
STEP: Checking that VirtualMachineInstance start failed
level=info timestamp=2018-08-07T19:01:06.473250Z pos=utils.go:257 component=tests namespace=kubevirt-test-default name=testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv kind=VirtualMachineInstance uid=3caa84e2-9a74-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k698w6w"
Pod name: disks-images-provider-2c85x Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-78krt Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-2n99b Pod phase: Running
2018/08/07 19:03:16 http: TLS handshake error from 10.129.0.1:50214: EOF
2018/08/07 19:03:26 http: TLS handshake error from 10.129.0.1:50220: EOF
level=info timestamp=2018-08-07T19:03:27.710830Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:03:36 http: TLS handshake error from 10.129.0.1:50226: EOF
2018/08/07 19:03:46 http: TLS handshake error from 10.129.0.1:50232: EOF
2018/08/07 19:03:56 http: TLS handshake error from 10.129.0.1:50238: EOF
level=info timestamp=2018-08-07T19:03:57.717281Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:04:06 http: TLS handshake error from 10.129.0.1:50244: EOF
2018/08/07 19:04:16 http: TLS handshake error from 10.129.0.1:50250: EOF
2018/08/07 19:04:26 http: TLS handshake error from 10.129.0.1:50256: EOF
2018/08/07 19:04:36 http: TLS handshake error from 10.129.0.1:50262: EOF
2018/08/07 19:04:46 http: TLS handshake error from 10.129.0.1:50268: EOF
2018/08/07 19:04:56 http: TLS handshake error from 10.129.0.1:50274: EOF
level=info timestamp=2018-08-07T19:04:57.711047Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:05:06 http: TLS handshake error from 10.129.0.1:50280: EOF
Pod name: virt-api-7d79764579-2x6zh Pod phase: Running
level=info
timestamp=2018-08-07T19:04:19.748455Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:04:19.750635Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:04:23.337568Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:04:26 http: TLS handshake error from 10.128.0.1:44196: EOF
level=info timestamp=2018-08-07T19:04:27.497896Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:04:33.534312Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:04:36 http: TLS handshake error from 10.128.0.1:44248: EOF
level=info timestamp=2018-08-07T19:04:43.707370Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:04:46 http: TLS handshake error from 10.128.0.1:44296: EOF
level=info timestamp=2018-08-07T19:04:47.062796Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:04:50.023078Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:04:50.026226Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:04:53.904520Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:04:56 http: TLS handshake error from 10.128.0.1:44344: EOF
level=info timestamp=2018-08-07T19:05:04.083294Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:05:06 http: TLS handshake error from 10.128.0.1:44394: EOF

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T18:57:04.820725Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 41152d31-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisswllqf9lgz85pfljwm8cwg9fgv4z6svt5x2prph5zjsp669w4n5crqvkbfvqdq"
level=info timestamp=2018-08-07T18:57:05.000216Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r kind= uid=acd5530b-9a73-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T18:57:05.000967Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r kind= uid=acd5530b-9a73-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:00:05.666211Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: acd5530b-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r"
level=info timestamp=2018-08-07T19:00:05.698224Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: acd5530b-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r"
level=info timestamp=2018-08-07T19:00:05.868887Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd kind= uid=18a3de46-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:00:05.869824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd kind= uid=18a3de46-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:01:06.123123Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 18a3de46-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd"
level=info timestamp=2018-08-07T19:01:06.295866Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv kind= uid=3caa84e2-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:01:06.296361Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv kind= uid=3caa84e2-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:01:06.419616Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv"
level=info timestamp=2018-08-07T19:02:06.488944Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3caa84e2-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv"
level=info timestamp=2018-08-07T19:02:06.661986Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:02:06.662549Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:02:06.823155Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
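The "StorageError ... Precondition failed: UID in precondition ... UID in object meta:" entries above are routine churn rather than test failures: the controller issued a write for a VMI whose object had already been deleted out from under it, the API server rejected the write because the precondition UID no longer matched a live object, and the controller put the key back on its work queue to retry. A minimal sketch of that requeue-on-conflict loop using client-go's workqueue follows; the function names are illustrative, not KubeVirt's actual code:

    package main

    import (
        "fmt"

        "k8s.io/apimachinery/pkg/api/errors"
        "k8s.io/client-go/util/workqueue"
    )

    // syncVMI is a stand-in for the controller's per-key reconcile function.
    func syncVMI(key string) error {
        // ... fetch the VMI, compute desired state, issue the update ...
        return nil
    }

    // processNextItem drains one key from the queue and retries on failure.
    func processNextItem(queue workqueue.RateLimitingInterface) bool {
        key, quit := queue.Get()
        if quit {
            return false
        }
        defer queue.Done(key)

        if err := syncVMI(key.(string)); err != nil {
            // Conflicts and stale-UID preconditions are expected while objects
            // are being deleted and recreated; log and retry with backoff
            // instead of failing hard.
            if errors.IsConflict(err) {
                fmt.Printf("reenqueuing VirtualMachineInstance %v: %v\n", key, err)
            }
            queue.AddRateLimited(key)
            return true
        }
        queue.Forget(key)
        return true
    }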
Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5mx4c2
Pod phase: Pending
level=info timestamp=2018-08-07T19:02:10.699076Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:02:10.699351Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:02:10.701155Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:02:21.972877Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:02:22.027968Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
level=info timestamp=2018-08-07T19:02:22.030603Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:02:22.031011Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.782 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-launcher crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:285
      should be stopped and have Failed phase [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:286

      Timed out after 90.008s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
level=info timestamp=2018-08-07T19:02:06.889517Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind=VirtualMachineInstance uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5mx4c2"
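Every failure in this run has the same shape: the virt-launcher pod stays Pending (even while reporting "Marked as ready" internally) and the VMI never reaches the Running phase within the 90-second window asserted at tests/utils.go:1101. The "Expected : false to equal : true" output is characteristic of a Gomega Eventually assertion over a boolean. Below is a sketch of the kind of polling helper that would produce exactly this failure text; waitForVMIRunning and getVMIPhase are hypothetical stand-ins for the suite's real helpers:

    package tests

    import (
        "time"

        . "github.com/onsi/gomega"
    )

    // waitForVMIRunning polls until the VMI reports the Running phase, or
    // fails the spec after 90 seconds with the timeout message seen above.
    // getVMIPhase is a placeholder for a client call reading vmi.Status.Phase.
    func waitForVMIRunning(namespace, name string, getVMIPhase func(ns, n string) (string, error)) {
        Eventually(func() bool {
            phase, err := getVMIPhase(namespace, name)
            if err != nil {
                return false
            }
            return phase == "Running"
        }, 90*time.Second, 1*time.Second).Should(BeTrue(), "Timed out waiting for VMI to enter Running phase")
    }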
Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:06:16 http: TLS handshake error from 10.129.0.1:50324: EOF
2018/08/07 19:06:26 http: TLS handshake error from 10.129.0.1:50330: EOF
level=info timestamp=2018-08-07T19:06:27.715370Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:06:36 http: TLS handshake error from 10.129.0.1:50336: EOF
2018/08/07 19:06:46 http: TLS handshake error from 10.129.0.1:50342: EOF
2018/08/07 19:06:56 http: TLS handshake error from 10.129.0.1:50348: EOF
level=info timestamp=2018-08-07T19:06:57.856863Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:07:06 http: TLS handshake error from 10.129.0.1:50354: EOF
2018/08/07 19:07:16 http: TLS handshake error from 10.129.0.1:50360: EOF
2018/08/07 19:07:26 http: TLS handshake error from 10.129.0.1:50366: EOF
level=info timestamp=2018-08-07T19:07:27.829935Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:07:36 http: TLS handshake error from 10.129.0.1:50372: EOF
2018/08/07 19:07:46 http: TLS handshake error from 10.129.0.1:50378: EOF
2018/08/07 19:07:56 http: TLS handshake error from 10.129.0.1:50384: EOF
2018/08/07 19:08:06 http: TLS handshake error from 10.129.0.1:50390: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:07:21.851897Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:07:21.890066Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:07:26 http: TLS handshake error from 10.128.0.1:45112: EOF
level=info timestamp=2018-08-07T19:07:26.874595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:07:36 http: TLS handshake error from 10.128.0.1:45158: EOF
level=info timestamp=2018-08-07T19:07:37.071807Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:07:46 http: TLS handshake error from 10.128.0.1:45200: EOF
level=info timestamp=2018-08-07T19:07:47.275945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:07:48.453846Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:07:52.154945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:07:52.155064Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:07:56 http: TLS handshake error from 10.128.0.1:45242: EOF
level=info timestamp=2018-08-07T19:07:57.514497Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:07:57.529368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:08:06 http: TLS handshake error from 10.128.0.1:45290: EOF

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:00:05.666211Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: acd5530b-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r"
level=info timestamp=2018-08-07T19:00:05.698224Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: acd5530b-9a73-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimwfjcwt69954jx54cjqnvwqwlmjt54zmvcz5c99vfngx44bw28rw4k7rjhfks6r"
level=info timestamp=2018-08-07T19:00:05.868887Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd kind= uid=18a3de46-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:00:05.869824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd kind= uid=18a3de46-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:01:06.123123Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 18a3de46-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwl6zprvjzwggxml97pkp6z7pstcrhmtqmgc8lrncvwnk6pnvmzhkft2kcprp7sd"
level=info timestamp=2018-08-07T19:01:06.295866Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv kind= uid=3caa84e2-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:01:06.296361Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv kind= uid=3caa84e2-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:01:06.419616Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv"
level=info timestamp=2018-08-07T19:02:06.488944Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3caa84e2-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv"
level=info timestamp=2018-08-07T19:02:06.661986Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:02:06.662549Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:02:06.823155Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
level=info timestamp=2018-08-07T19:05:07.296866Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 60a44fff-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
level=info timestamp=2018-08-07T19:05:07.503909Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl kind= uid=cc6eb89a-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:05:07.504872Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl kind= uid=cc6eb89a-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rhjw25v
Pod phase: Pending
level=info timestamp=2018-08-07T19:05:11.891663Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:05:11.893061Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:05:11.897237Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:05:24.253888Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:05:24.310734Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl"
level=info timestamp=2018-08-07T19:05:24.313090Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:05:24.313614Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.860 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:309
      should recover and continue management [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:310

      Timed out after 90.012s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
level=info timestamp=2018-08-07T19:05:07.733602Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl kind=VirtualMachineInstance uid=cc6eb89a-9a74-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rhjw25v"
• [SLOW TEST:101.164 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler is responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:340
      should indicate that a node is ready for vmis
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:341
------------------------------
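The steady drumbeat of "http: TLS handshake error from …: EOF" lines in both virt-api pods, interleaved with successful requests, suggests plain-TCP probes (for example a connection-based health check) opening the HTTPS port and closing it without ever speaking TLS; Go's net/http server logs this exact message when a handshake ends in EOF, so the lines are almost certainly benign noise rather than a cause of the failures. A self-contained reproduction of that log line, with all names local to this sketch:

    package main

    import (
        "fmt"
        "net"
        "net/http"
        "net/http/httptest"
        "time"
    )

    func main() {
        // An HTTPS test server; its underlying http.Server logs
        // "http: TLS handshake error from <addr>: EOF" on aborted handshakes.
        srv := httptest.NewTLSServer(http.HandlerFunc(
            func(w http.ResponseWriter, r *http.Request) {}))
        defer srv.Close()

        // A plain TCP "probe": connect and immediately close, never doing TLS.
        conn, err := net.Dial("tcp", srv.Listener.Addr().String())
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        conn.Close()

        // Give the server goroutine a moment to emit the handshake-error log.
        time.Sleep(100 * time.Millisecond)
    }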
Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:10:36 http: TLS handshake error from 10.129.0.1:50482: EOF
2018/08/07 19:10:46 http: TLS handshake error from 10.129.0.1:50488: EOF
2018/08/07 19:10:56 http: TLS handshake error from 10.129.0.1:50494: EOF
2018/08/07 19:11:06 http: TLS handshake error from 10.129.0.1:50500: EOF
2018/08/07 19:11:16 http: TLS handshake error from 10.129.0.1:50506: EOF
2018/08/07 19:11:26 http: TLS handshake error from 10.129.0.1:50512: EOF
2018/08/07 19:11:36 http: TLS handshake error from 10.129.0.1:50518: EOF
2018/08/07 19:11:46 http: TLS handshake error from 10.129.0.1:50526: EOF
2018/08/07 19:11:56 http: TLS handshake error from 10.129.0.1:50532: EOF
2018/08/07 19:12:06 http: TLS handshake error from 10.129.0.1:50538: EOF
2018/08/07 19:12:16 http: TLS handshake error from 10.129.0.1:50544: EOF
2018/08/07 19:12:26 http: TLS handshake error from 10.129.0.1:50550: EOF
level=info timestamp=2018-08-07T19:12:27.719115Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:12:36 http: TLS handshake error from 10.129.0.1:50556: EOF
2018/08/07 19:12:46 http: TLS handshake error from 10.129.0.1:50562: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:12:02.203107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:12:06 http: TLS handshake error from 10.128.0.1:46462: EOF
level=info timestamp=2018-08-07T19:12:12.419007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:12:16.444057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:12:16.448459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/08/07 19:12:16 http: TLS handshake error from 10.128.0.1:46504: EOF
level=info timestamp=2018-08-07T19:12:20.413563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:12:22.614935Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:12:24.805058Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:12:24.846652Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:12:26 http: TLS handshake error from 10.128.0.1:46546: EOF
level=info timestamp=2018-08-07T19:12:32.789945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:12:36 http: TLS handshake error from 10.128.0.1:46592: EOF
level=info timestamp=2018-08-07T19:12:42.904849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:12:46 http: TLS handshake error from 10.128.0.1:46634: EOF

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:01:06.295866Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv kind= uid=3caa84e2-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:01:06.296361Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv kind= uid=3caa84e2-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:01:06.419616Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv"
level=info timestamp=2018-08-07T19:02:06.488944Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3caa84e2-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicklwn5bv92m5754rwth6lcfqmdt5rwcnt98k6gtv6grw975gpx6clctrmkhvbzv"
level=info timestamp=2018-08-07T19:02:06.661986Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:02:06.662549Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:02:06.823155Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
level=info timestamp=2018-08-07T19:05:07.296866Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 60a44fff-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
level=info timestamp=2018-08-07T19:05:07.503909Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl kind= uid=cc6eb89a-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:05:07.504872Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl kind= uid=cc6eb89a-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:08:08.186848Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cc6eb89a-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl"
level=info timestamp=2018-08-07T19:09:49.462563Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc kind= uid=747989c5-9a75-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:09:49.463528Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc kind= uid=747989c5-9a75-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:09:49.647013Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc"
level=info timestamp=2018-08-07T19:09:49.694853Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdxrsg7
Pod phase: Pending
level=info timestamp=2018-08-07T19:09:53.699551Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:09:53.699908Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:09:53.701848Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:10:04.927944Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:10:05.021773Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc"
level=info timestamp=2018-08-07T19:10:05.023886Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:10:05.024302Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [180.777 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler is not responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:371
      the node controller should react [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:410

      Timed out after 90.012s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
level=info timestamp=2018-08-07T19:09:49.664027Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc kind=VirtualMachineInstance uid=747989c5-9a75-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdxrsg7"
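Each launcher dump above ends the same way: virt-launcher connects to libvirt, creates a watchdog file under /var/run/kubevirt/watchdog-files/, and marks itself ready, while virt-handler treats a stale watchdog file as a dead launcher. A minimal illustration of that heartbeat-file idea follows; the path, interval, and function name are illustrative for this sketch, not KubeVirt's actual values:

    package main

    import (
        "log"
        "os"
        "time"
    )

    // touchWatchdog creates a heartbeat file and keeps its mtime fresh.
    // A supervisor that sees the mtime fall too far behind the expected
    // interval can assume this process has died. (time.Tick never stops
    // its ticker; acceptable here because the loop runs for process life.)
    func touchWatchdog(path string, interval time.Duration) error {
        f, err := os.Create(path)
        if err != nil {
            return err
        }
        f.Close()
        for range time.Tick(interval) {
            now := time.Now()
            if err := os.Chtimes(path, now, now); err != nil {
                return err
            }
        }
        return nil
    }

    func main() {
        if err := touchWatchdog("/tmp/demo-watchdog", 5*time.Second); err != nil {
            log.Fatal(err)
        }
    }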
Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:13:36 http: TLS handshake error from 10.129.0.1:50592: EOF
2018/08/07 19:13:46 http: TLS handshake error from 10.129.0.1:50598: EOF
2018/08/07 19:13:56 http: TLS handshake error from 10.129.0.1:50604: EOF
2018/08/07 19:14:06 http: TLS handshake error from 10.129.0.1:50610: EOF
2018/08/07 19:14:16 http: TLS handshake error from 10.129.0.1:50616: EOF
2018/08/07 19:14:26 http: TLS handshake error from 10.129.0.1:50622: EOF
2018/08/07 19:14:36 http: TLS handshake error from 10.129.0.1:50628: EOF
2018/08/07 19:14:46 http: TLS handshake error from 10.129.0.1:50634: EOF
2018/08/07 19:14:56 http: TLS handshake error from 10.129.0.1:50642: EOF
level=info timestamp=2018-08-07T19:14:57.746765Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:15:06 http: TLS handshake error from 10.129.0.1:50648: EOF
2018/08/07 19:15:16 http: TLS handshake error from 10.129.0.1:50654: EOF
2018/08/07 19:15:26 http: TLS handshake error from 10.129.0.1:50660: EOF
2018/08/07 19:15:36 http: TLS handshake error from 10.129.0.1:50666: EOF
2018/08/07 19:15:46 http: TLS handshake error from 10.129.0.1:50672: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
2018/08/07 19:14:56 http: TLS handshake error from 10.128.0.1:47218: EOF
level=info timestamp=2018-08-07T19:15:05.728510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:15:06 http: TLS handshake error from 10.128.0.1:47268: EOF
level=info timestamp=2018-08-07T19:15:15.993611Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:15:16 http: TLS handshake error from 10.128.0.1:47312: EOF
level=info timestamp=2018-08-07T19:15:21.739526Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:15:26.118025Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:15:26 http: TLS handshake error from 10.128.0.1:47356: EOF
level=info timestamp=2018-08-07T19:15:26.705045Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:15:26.716956Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:15:27.388417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:15:36.308179Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:15:36 http: TLS handshake error from 10.128.0.1:47498: EOF
level=info timestamp=2018-08-07T19:15:46.510985Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:15:46 http: TLS handshake error from 10.128.0.1:47540: EOF

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:02:06.661986Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:02:06.662549Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz kind= uid=60a44fff-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:02:06.823155Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
level=info timestamp=2018-08-07T19:05:07.296866Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 60a44fff-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirg2tmg69z5r9czllncwx897bpd856xlsjqdw5ggzmwt7g2zwm4kbw9d2vrfwlbz"
level=info timestamp=2018-08-07T19:05:07.503909Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl kind= uid=cc6eb89a-9a74-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:05:07.504872Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl kind= uid=cc6eb89a-9a74-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:08:08.186848Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cc6eb89a-9a74-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik2tj9r4k56pdv8z8nmdv8p5h8vdntj8t4s5rh4htpmrvm992k52nft72n7kwgtl"
level=info timestamp=2018-08-07T19:09:49.462563Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc kind= uid=747989c5-9a75-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:09:49.463528Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc kind= uid=747989c5-9a75-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:09:49.647013Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc"
level=info timestamp=2018-08-07T19:09:49.694853Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc"
level=info timestamp=2018-08-07T19:12:50.148701Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 747989c5-9a75-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq29bvkqkrp7r6k7fh8b79dl5w5v9m2z2ttmtdw79k77wg7r9l99n9qwphnczklc"
level=info timestamp=2018-08-07T19:12:50.396718Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j57524fdsb2hvdskbdd62755mxh7 kind= uid=e054eaac-9a75-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:12:50.398218Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j57524fdsb2hvdskbdd62755mxh7 kind= uid=e054eaac-9a75-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:12:50.700567Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j57524fdsb2hvdskbdd62755mxh7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j57524fdsb2hvdskbdd62755mxh7"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T18:27:08.427089Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-08-07T18:27:08.562037Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:08.564833Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:08.663683Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:08.680027Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:08.685407Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j59x787
Pod phase: Pending
level=info timestamp=2018-08-07T19:12:55.382397Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:12:55.383032Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:12:55.387214Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:13:05.400277Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:13:05.495236Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j57524fdsb2hvdskbdd62755mxh7"
level=info timestamp=2018-08-07T19:13:05.497438Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:13:05.497892Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.906 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with node tainted
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:463
      the vmi with tolerations should be scheduled [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:485

      Timed out after 90.012s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
level=info timestamp=2018-08-07T19:12:50.708685Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j57524fdsb2hvdskbdd62755mxh7 kind=VirtualMachineInstance uid=e054eaac-9a75-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmib8cwndg4qg87w7s5sg6crjmhmzn5zlj2n77j59x787"
•
------------------------------
• [SLOW TEST:46.702 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-default
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:50.132 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-alternative
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.264 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592
      should enable emulation in virt-launcher [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:604

      Software emulation is not enabled on this cluster
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.259 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592
      should be reflected in domain XML [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:641

      Software emulation is not enabled on this cluster
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.258 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592
      should request a TUN device but not KVM [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:685

      Software emulation is not enabled on this cluster
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600
------------------------------
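The three SKIPPING entries above come from a BeforeEach guard: when the cluster offers no software-emulation fallback, the emulation-mode specs bail out with the "Software emulation is not enabled on this cluster" message instead of failing. A sketch of that guard in Ginkgo; softwareEmulationEnabled is a hypothetical stand-in for however the suite detects the setting:

    package tests

    import (
        . "github.com/onsi/ginkgo"
    )

    // softwareEmulationEnabled is a placeholder for a helper that would
    // inspect the cluster configuration (e.g. a useEmulation flag) and
    // report whether software emulation is available.
    var softwareEmulationEnabled func() bool

    var _ = BeforeEach(func() {
        if !softwareEmulationEnabled() {
            // Skip marks the spec as skipped rather than failed, producing
            // the "S [SKIPPING] in Spec Setup (BeforeEach)" entries above.
            Skip("Software emulation is not enabled on this cluster")
        }
    })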
------------------------------
••••
Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:18:26 http: TLS handshake error from 10.129.0.1:50770: EOF
level=info timestamp=2018-08-07T19:18:27.885073Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:18:36 http: TLS handshake error from 10.129.0.1:50776: EOF
2018/08/07 19:18:46 http: TLS handshake error from 10.129.0.1:50782: EOF
2018/08/07 19:18:56 http: TLS handshake error from 10.129.0.1:50788: EOF
level=info timestamp=2018-08-07T19:18:57.809575Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:19:06 http: TLS handshake error from 10.129.0.1:50794: EOF
2018/08/07 19:19:16 http: TLS handshake error from 10.129.0.1:50800: EOF
2018/08/07 19:19:26 http: TLS handshake error from 10.129.0.1:50806: EOF
2018/08/07 19:19:36 http: TLS handshake error from 10.129.0.1:50812: EOF
2018/08/07 19:19:46 http: TLS handshake error from 10.129.0.1:50818: EOF
2018/08/07 19:19:56 http: TLS handshake error from 10.129.0.1:50824: EOF
2018/08/07 19:20:06 http: TLS handshake error from 10.129.0.1:50830: EOF
2018/08/07 19:20:16 http: TLS handshake error from 10.129.0.1:50836: EOF
2018/08/07 19:20:26 http: TLS handshake error from 10.129.0.1:50842: EOF
Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
2018/08/07 19:19:56 http: TLS handshake error from 10.128.0.1:48666: EOF
level=info timestamp=2018-08-07T19:19:57.453219Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:19:59.696609Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:19:59.710826Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:20:01.660706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:20:06 http: TLS handshake error from 10.128.0.1:48714: EOF
level=info timestamp=2018-08-07T19:20:11.848979Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:20:16 http: TLS handshake error from 10.128.0.1:48756: EOF
level=info timestamp=2018-08-07T19:20:22.049508Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:20:24.043670Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:20:26 http: TLS handshake error from 10.128.0.1:48798: EOF
level=info timestamp=2018-08-07T19:20:27.560496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:20:30.003670Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:20:30.004923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:20:32.262944Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:16:39.485470Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:16:39.639520Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-alternative/testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq"
level=info timestamp=2018-08-07T19:16:39.694114Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-alternative/testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq"
level=info timestamp=2018-08-07T19:17:30.360502Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl kind= uid=87344b21-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:17:30.361212Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl kind= uid=87344b21-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:17:30.483124Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl"
level=info timestamp=2018-08-07T19:17:30.504967Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl"
level=info timestamp=2018-08-07T19:17:31.074789Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87344b21-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl"
level=info timestamp=2018-08-07T19:17:31.273907Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9 kind= uid=87c0c127-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:17:31.274252Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9 kind= uid=87c0c127-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:17:31.658315Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87c0c127-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9"
level=info timestamp=2018-08-07T19:17:32.404034Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind= uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:17:32.404468Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind= uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:17:32.535062Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:17:32.589185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
Pod name: virt-launcher-testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsqhwlk4
Pod phase: Pending
level=info timestamp=2018-08-07T19:17:37.028557Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:17:37.028865Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:17:37.030897Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:17:48.621116Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:17:48.675345Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:17:48.678058Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:17:48.678711Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
------------------------------
• Failure [180.750 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
Delete a VirtualMachineInstance's Pod
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837
should result in the VirtualMachineInstance moving to a finalized state [It]
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:838

Timed out after 90.013s.
Timed out waiting for VMI to enter Running phase
Expected
    <bool>: false
to equal
    <bool>: true

/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Creating the VirtualMachineInstance
level=info timestamp=2018-08-07T19:17:32.618444Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind=VirtualMachineInstance uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsqhwlk4"
Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:21:26 http: TLS handshake error from 10.129.0.1:50880: EOF
2018/08/07 19:21:36 http: TLS handshake error from 10.129.0.1:50886: EOF
2018/08/07 19:21:46 http: TLS handshake error from 10.129.0.1:50892: EOF
2018/08/07 19:21:56 http: TLS handshake error from 10.129.0.1:50898: EOF
2018/08/07 19:22:06 http: TLS handshake error from 10.129.0.1:50904: EOF
2018/08/07 19:22:16 http: TLS handshake error from 10.129.0.1:50910: EOF
2018/08/07 19:22:26 http: TLS handshake error from 10.129.0.1:50916: EOF
level=info timestamp=2018-08-07T19:22:27.861632Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:22:36 http: TLS handshake error from 10.129.0.1:50922: EOF
2018/08/07 19:22:46 http: TLS handshake error from 10.129.0.1:50928: EOF
2018/08/07 19:22:56 http: TLS handshake error from 10.129.0.1:50934: EOF
level=info timestamp=2018-08-07T19:22:57.822492Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:23:06 http: TLS handshake error from 10.129.0.1:50940: EOF
2018/08/07 19:23:16 http: TLS handshake error from 10.129.0.1:50946: EOF
2018/08/07 19:23:26 http: TLS handshake error from 10.129.0.1:50952: EOF
Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:22:55.020146Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:22:55.139134Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:22:56 http: TLS handshake error from 10.128.0.1:49476: EOF
level=info timestamp=2018-08-07T19:23:01.381039Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:23:01.407932Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:23:05.221498Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:23:06 http: TLS handshake error from 10.128.0.1:49524: EOF
level=info timestamp=2018-08-07T19:23:15.437544Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:23:16 http: TLS handshake error from 10.128.0.1:49566: EOF
level=info timestamp=2018-08-07T19:23:25.370779Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:23:25.646265Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:23:26 http: TLS handshake error from 10.128.0.1:49610: EOF
level=info timestamp=2018-08-07T19:23:27.444400Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:23:31.631805Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:23:31.692666Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:17:30.483124Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl"
level=info timestamp=2018-08-07T19:17:30.504967Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl"
level=info timestamp=2018-08-07T19:17:31.074789Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87344b21-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2mzjcdvgwdbshgmcvtc8k7xr6mq4tdkd29snntj5fqglw62lpkz8c7wf8z2rldl"
level=info timestamp=2018-08-07T19:17:31.273907Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9 kind= uid=87c0c127-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:17:31.274252Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9 kind= uid=87c0c127-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:17:31.658315Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87c0c127-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9"
level=info timestamp=2018-08-07T19:17:32.404034Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind= uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:17:32.404468Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind= uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:17:32.535062Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:17:32.589185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:20:33.070984Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 886dddc4-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:20:33.281555Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq kind= uid=f43c1001-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:20:33.282128Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq kind= uid=f43c1001-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:20:33.447635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq"
level=info timestamp=2018-08-07T19:20:33.504305Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq"
Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
Pod name: virt-launcher-testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cndnwk7
Pod phase: Pending
level=info timestamp=2018-08-07T19:20:37.487448Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:20:37.487725Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:20:37.489288Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:20:48.290641Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:20:48.365080Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq"
level=info timestamp=2018-08-07T19:20:48.368237Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:20:48.368962Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure [180.804 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
Delete a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
with an active pod.
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:870
should result in pod being terminated [It]
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:871

Timed out after 90.011s.
Timed out waiting for VMI to enter Running phase
Expected
    <bool>: false
to equal
    <bool>: true

/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Creating the VirtualMachineInstance
level=info timestamp=2018-08-07T19:20:33.558783Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq kind=VirtualMachineInstance uid=f43c1001-9a76-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cndnwk7"
Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:24:16 http: TLS handshake error from 10.129.0.1:50984: EOF
2018/08/07 19:24:26 http: TLS handshake error from 10.129.0.1:50990: EOF
2018/08/07 19:24:36 http: TLS handshake error from 10.129.0.1:50996: EOF
2018/08/07 19:24:46 http: TLS handshake error from 10.129.0.1:51002: EOF
2018/08/07 19:24:56 http: TLS handshake error from 10.129.0.1:51008: EOF
2018/08/07 19:25:06 http: TLS handshake error from 10.129.0.1:51014: EOF
2018/08/07 19:25:16 http: TLS handshake error from 10.129.0.1:51020: EOF
2018/08/07 19:25:26 http: TLS handshake error from 10.129.0.1:51026: EOF
level=info timestamp=2018-08-07T19:25:27.780070Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:25:36 http: TLS handshake error from 10.129.0.1:51032: EOF
2018/08/07 19:25:46 http: TLS handshake error from 10.129.0.1:51038: EOF
2018/08/07 19:25:56 http: TLS handshake error from 10.129.0.1:51044: EOF
2018/08/07 19:26:06 http: TLS handshake error from 10.129.0.1:51050: EOF
2018/08/07 19:26:16 http: TLS handshake error from 10.129.0.1:51056: EOF
2018/08/07 19:26:26 http: TLS handshake error from 10.129.0.1:51062: EOF
Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:25:58.815112Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:26:03.083840Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:26:03.085936Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:26:06 http: TLS handshake error from 10.128.0.1:50340: EOF
level=info timestamp=2018-08-07T19:26:08.898917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:26:16 http: TLS handshake error from 10.128.0.1:50386: EOF
level=info timestamp=2018-08-07T19:26:16.690899Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:26:16.695821Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:26:19.061533Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:26:26 http: TLS handshake error from 10.128.0.1:50430: EOF
level=info timestamp=2018-08-07T19:26:26.675937Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:26:27.455724Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:26:29.266019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:26:33.380611Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:26:33.398954Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:17:31.274252Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9 kind= uid=87c0c127-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:17:31.658315Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87c0c127-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi745kltftb6wfwtgjxmnwjhzl87gjgpvx2ckg66sgmvfghv7jtgvwzmf6t4pzsq9"
level=info timestamp=2018-08-07T19:17:32.404034Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind= uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:17:32.404468Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind= uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:17:32.535062Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:17:32.589185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:20:33.070984Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 886dddc4-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv"
level=info timestamp=2018-08-07T19:20:33.281555Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq kind= uid=f43c1001-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:20:33.282128Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq kind= uid=f43c1001-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:20:33.447635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq"
level=info timestamp=2018-08-07T19:20:33.504305Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq"
level=info timestamp=2018-08-07T19:23:33.857674Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f43c1001-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq"
level=info timestamp=2018-08-07T19:23:34.043789Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind= uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:23:34.044530Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind= uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:23:34.249072Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw"
Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
Pod name: virt-launcher-testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qr2z4cm
Pod phase: Pending
level=info timestamp=2018-08-07T19:23:38.818885Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:23:38.819202Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:23:38.821453Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:23:50.036478Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:23:50.139189Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw"
level=info timestamp=2018-08-07T19:23:50.144162Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:23:50.144790Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure [180.810 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
Delete a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
with ACPI and 0 grace period seconds
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:895
should result in vmi status failed [It]
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:896

Timed out after 90.012s.
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ STEP: Creating the VirtualMachineInstance level=info timestamp=2018-08-07T19:23:34.301457Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind=VirtualMachineInstance uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qr2z4cm" Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running level=info timestamp=2018-08-07T19:27:27.774216Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 19:27:36 http: TLS handshake error from 10.129.0.1:51106: EOF 2018/08/07 19:27:46 http: TLS handshake error from 10.129.0.1:51112: EOF 2018/08/07 19:27:56 http: TLS handshake error from 10.129.0.1:51118: EOF level=info timestamp=2018-08-07T19:27:57.841051Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 19:28:06 http: TLS handshake error from 10.129.0.1:51124: EOF 2018/08/07 19:28:16 http: TLS handshake error from 10.129.0.1:51130: EOF 2018/08/07 19:28:26 http: TLS handshake error from 10.129.0.1:51136: EOF 2018/08/07 19:28:36 http: TLS handshake error from 10.129.0.1:51142: EOF 2018/08/07 19:28:46 http: TLS handshake error from 10.129.0.1:51148: EOF 2018/08/07 19:28:56 http: TLS handshake error from 10.129.0.1:51154: EOF level=info timestamp=2018-08-07T19:28:57.805252Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 19:29:06 http: TLS handshake error from 10.129.0.1:51160: EOF 2018/08/07 19:29:16 http: TLS handshake error from 10.129.0.1:51166: EOF 2018/08/07 19:29:26 http: TLS handshake error from 10.129.0.1:51172: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running 2018/08/07 19:28:56 http: TLS handshake error from 10.128.0.1:51088: EOF level=info timestamp=2018-08-07T19:28:57.824529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:29:02.394194Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:29:04.892208Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:29:04.894864Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:29:06 http: TLS handshake error from 10.128.0.1:51136: EOF level=info timestamp=2018-08-07T19:29:12.573016Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:29:16 http: TLS handshake error from 10.128.0.1:51178: EOF level=info timestamp=2018-08-07T19:29:16.715180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T19:29:16.718930Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T19:29:22.755273Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:29:26 http: TLS handshake error from 10.128.0.1:51220: EOF level=info timestamp=2018-08-07T19:29:27.492368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T19:29:28.061710Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:29:32.958876Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:17:32.404468Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv kind= uid=886dddc4-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:17:32.535062Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv" level=info timestamp=2018-08-07T19:17:32.589185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv" level=info timestamp=2018-08-07T19:20:33.070984Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv, 
ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 886dddc4-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6r9lgmjvpjb9phtwgcrwqqd6lfvwtl6bgpqsq5rkdd9vzs8fbqts99dl4f4pqxv" level=info timestamp=2018-08-07T19:20:33.281555Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq kind= uid=f43c1001-9a76-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:20:33.282128Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq kind= uid=f43c1001-9a76-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:20:33.447635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq" level=info timestamp=2018-08-07T19:20:33.504305Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq" level=info timestamp=2018-08-07T19:23:33.857674Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f43c1001-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq" level=info timestamp=2018-08-07T19:23:34.043789Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind= uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:23:34.044530Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind= uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:23:34.249072Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw\": the object has been modified; please apply your changes to the latest version and try again" 
msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw" level=info timestamp=2018-08-07T19:26:34.705584Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5ffb652d-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw" level=info timestamp=2018-08-07T19:26:34.890296Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind= uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:26:34.891076Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind= uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j98p94x
Pod phase: Pending
level=info timestamp=2018-08-07T19:26:39.082745Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:26:39.083043Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:26:39.084921Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:26:49.095038Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:26:49.172253Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv"
level=info timestamp=2018-08-07T19:26:49.174467Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:26:49.174903Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.896 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with ACPI and some grace period seconds
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:920
      should result in vmi status succeeded [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:921

      Timed out after 90.011s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Creating the VirtualMachineInstance
level=info timestamp=2018-08-07T19:26:35.134906Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind=VirtualMachineInstance uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j98p94x"

Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:30:26 http: TLS handshake error from 10.129.0.1:51210: EOF
2018/08/07 19:30:36 http: TLS handshake error from 10.129.0.1:51216: EOF
2018/08/07 19:30:46 http: TLS handshake error from 10.129.0.1:51222: EOF
2018/08/07 19:30:56 http: TLS handshake error from 10.129.0.1:51228: EOF
level=info timestamp=2018-08-07T19:30:57.831607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:31:06 http: TLS handshake error from 10.129.0.1:51234: EOF
2018/08/07 19:31:16 http: TLS handshake error from 10.129.0.1:51240: EOF
2018/08/07 19:31:26 http: TLS handshake error from 10.129.0.1:51246: EOF
2018/08/07 19:31:36 http: TLS handshake error from 10.129.0.1:51252: EOF
2018/08/07 19:31:46 http: TLS handshake error from 10.129.0.1:51258: EOF
2018/08/07 19:31:56 http: TLS handshake error from 10.129.0.1:51264: EOF
2018/08/07 19:32:06 http: TLS handshake error from 10.129.0.1:51270: EOF
2018/08/07 19:32:16 http: TLS handshake error from 10.129.0.1:51276: EOF
2018/08/07 19:32:26 http: TLS handshake error from 10.129.0.1:51282: EOF
2018/08/07 19:32:36 http: TLS handshake error from 10.129.0.1:51288: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
2018/08/07 19:31:56 http: TLS handshake error from 10.128.0.1:51898: EOF
level=info timestamp=2018-08-07T19:31:57.514579Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:31:59.229380Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:32:05.786256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:32:06 http: TLS handshake error from 10.128.0.1:51946: EOF
level=info timestamp=2018-08-07T19:32:06.754199Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:32:06.791314Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:32:16.005403Z pos=filter.go:46 component=virt-api
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:32:16 http: TLS handshake error from 10.128.0.1:51988: EOF level=info timestamp=2018-08-07T19:32:16.753723Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T19:32:16.759330Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T19:32:26.198833Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:32:26 http: TLS handshake error from 10.128.0.1:52030: EOF level=info timestamp=2018-08-07T19:32:27.526481Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T19:32:29.435491Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:20:33.447635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq" level=info timestamp=2018-08-07T19:20:33.504305Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq" level=info timestamp=2018-08-07T19:23:33.857674Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f43c1001-9a76-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9z5jhjcvbrl8pdrdhcsnv2z4c8pdlfh7645cnbpvz979jrnslmwh62krxxvwwdq" level=info timestamp=2018-08-07T19:23:34.043789Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind= 
uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:23:34.044530Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind= uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:23:34.249072Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw" level=info timestamp=2018-08-07T19:26:34.705584Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5ffb652d-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw" level=info timestamp=2018-08-07T19:26:34.890296Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind= uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:26:34.891076Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind= uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:29:35.618989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cbc557c5-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv" level=info timestamp=2018-08-07T19:29:35.639939Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cbc557c5-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing 
VirtualMachineInstance kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv" level=info timestamp=2018-08-07T19:29:35.885463Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74 kind= uid=37a6e015-9a78-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:29:35.886192Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74 kind= uid=37a6e015-9a78-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:29:36.074501Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" level=info timestamp=2018-08-07T19:29:36.155069Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n" level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
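(Every failure in this run has the same shape: the virt-launcher pod stays Pending, its log ends at "Marked as ready", and after 90 seconds the test gives up with "Timed out waiting for VMI to enter Running phase" from utils.go:1101. That message comes from a polling assertion; the sketch below is an assumed reconstruction of that pattern in Gomega, not the verbatim KubeVirt helper, and getPhase is a hypothetical accessor. The virt-handler-xnssb dump continues after it.)

    package tests

    import (
        "time"

        "github.com/onsi/gomega"
    )

    // waitForVMIRunning shows the Eventually pattern behind the
    // "Timed out after 90.01xs" failures above: poll the VMI phase once a
    // second for up to 90 seconds, then fail with the quoted message.
    func waitForVMIRunning(getPhase func() string) {
        gomega.Eventually(func() bool {
            return getPhase() == "Running" // re-fetch the VMI phase on each poll
        }, 90*time.Second, 1*time.Second).Should(
            gomega.BeTrue(), "Timed out waiting for VMI to enter Running phase")
    }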
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9dbkx8
Pod phase: Pending
level=info timestamp=2018-08-07T19:29:40.039898Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:29:40.040203Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:29:40.042238Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:29:50.780238Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:29:50.860377Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74"
level=info timestamp=2018-08-07T19:29:50.862213Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:29:50.862640Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [181.062 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with grace period greater than 0
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:945
      should run graceful shutdown [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:946

      Timed out after 90.011s.
      Timed out waiting for VMI to enter Running phase
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Setting a VirtualMachineInstance termination grace period to 5
STEP: Creating the VirtualMachineInstance
level=info timestamp=2018-08-07T19:29:36.170806Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74 kind=VirtualMachineInstance uid=37a6e015-9a78-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9dbkx8"

Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:33:26 http: TLS handshake error from 10.129.0.1:51320: EOF
2018/08/07 19:33:36 http: TLS handshake error from 10.129.0.1:51326: EOF
2018/08/07 19:33:46 http: TLS handshake error from 10.129.0.1:51332: EOF
2018/08/07 19:33:56 http: TLS handshake error from 10.129.0.1:51338: EOF
2018/08/07 19:34:06 http: TLS handshake error from 10.129.0.1:51344: EOF
2018/08/07 19:34:16 http: TLS handshake error from 10.129.0.1:51350: EOF
2018/08/07 19:34:26 http: TLS handshake error from 10.129.0.1:51356: EOF
2018/08/07 19:34:36 http: TLS handshake error from 10.129.0.1:51362: EOF
2018/08/07 19:34:46 http: TLS handshake error from 10.129.0.1:51368: EOF
2018/08/07 19:34:56 http: TLS handshake error from 10.129.0.1:51374: EOF
level=info timestamp=2018-08-07T19:34:57.815516Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:35:06 http: TLS handshake error from 10.129.0.1:51380: EOF
2018/08/07 19:35:16 http: TLS handshake error from 10.129.0.1:51386: EOF
2018/08/07 19:35:26 http: TLS handshake error from 10.129.0.1:51392: EOF
2018/08/07 19:35:36 http: TLS handshake error from 10.129.0.1:51398: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:34:49.354515Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:34:56 http: TLS handshake error from 10.128.0.1:52686: EOF
level=info timestamp=2018-08-07T19:34:59.562189Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:35:00.532029Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:35:06 http: TLS handshake error from 10.128.0.1:52732: EOF
level=info timestamp=2018-08-07T19:35:08.764346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:35:08.768464Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:35:09.781503Z pos=filter.go:46
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:35:16 http: TLS handshake error from 10.128.0.1:52774: EOF level=info timestamp=2018-08-07T19:35:20.023942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:35:26 http: TLS handshake error from 10.128.0.1:52816: EOF level=info timestamp=2018-08-07T19:35:27.519607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-07T19:35:30.299720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:35:30.724829Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:35:36 http: TLS handshake error from 10.128.0.1:52862: EOF Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:23:34.044530Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw kind= uid=5ffb652d-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:23:34.249072Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw" level=info timestamp=2018-08-07T19:26:34.705584Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5ffb652d-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqsbzct4mh2d66jtlqmkcmtq9d6gbbxf27q8qrlvkdt2s7wlzmq9snjfl66z99gw" level=info timestamp=2018-08-07T19:26:34.890296Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind= uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:26:34.891076Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind= uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:29:35.618989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cbc557c5-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv" level=info timestamp=2018-08-07T19:29:35.639939Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cbc557c5-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv" level=info timestamp=2018-08-07T19:29:35.885463Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74 kind= uid=37a6e015-9a78-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:29:35.886192Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74 kind= uid=37a6e015-9a78-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:29:36.074501Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" level=info timestamp=2018-08-07T19:29:36.155069Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" level=info timestamp=2018-08-07T19:32:36.692150Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": StorageError: invalid object, Code: 4, Key: 
/kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 37a6e015-9a78-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" level=info timestamp=2018-08-07T19:32:36.889829Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl kind= uid=a38b1a2c-9a78-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:32:36.890461Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl kind= uid=a38b1a2c-9a78-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:32:37.050830Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n" level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
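(The steady "http: TLS handshake error from 10.129.0.1:5xxxx: EOF" lines, arriving every ten seconds throughout the virt-api dumps, are not part of any failure: this is what Go's net/http server prints when something opens a TCP connection to a TLS port and then closes it without handshaking, which is consistent with a plain tcpSocket health probe. A minimal reproduction is sketched below; the certificate file names are placeholders, not paths from this cluster. The handler dump then continues.)

    package main

    import (
        "log"
        "net"
        "net/http"
        "time"
    )

    func main() {
        srv := &http.Server{Addr: "127.0.0.1:8443", Handler: http.NotFoundHandler()}
        // Assumes tls.crt/tls.key exist; any self-signed pair will do.
        go func() { log.Println(srv.ListenAndServeTLS("tls.crt", "tls.key")) }()
        time.Sleep(200 * time.Millisecond) // give the listener time to start

        conn, err := net.Dial("tcp", "127.0.0.1:8443") // probe-style connect
        if err == nil {
            conn.Close() // close before any TLS handshake bytes are sent
        }
        // stderr now shows: http: TLS handshake error from 127.0.0.1:xxxxx: EOF
        time.Sleep(200 * time.Millisecond)
    }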
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfc4vnw
Pod phase: Pending
level=info timestamp=2018-08-07T19:32:41.224843Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:32:41.225530Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:32:41.229509Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:32:52.554573Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:32:52.622239Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl"
level=info timestamp=2018-08-07T19:32:52.623978Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:32:52.624624Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.905 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:997
    should be in Failed phase [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:998

    Timed out after 90.012s.
    Timed out waiting for VMI to enter Running phase
    Expected
        : false
    to equal
        : true

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Starting a VirtualMachineInstance
level=info timestamp=2018-08-07T19:32:37.186818Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl kind=VirtualMachineInstance uid=a38b1a2c-9a78-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfc4vnw"

Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:36:56 http: TLS handshake error from 10.129.0.1:51448: EOF
level=info timestamp=2018-08-07T19:36:57.825772Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:37:06 http: TLS handshake error from 10.129.0.1:51454: EOF
2018/08/07 19:37:16 http: TLS handshake error from 10.129.0.1:51460: EOF
2018/08/07 19:37:26 http: TLS handshake error from 10.129.0.1:51466: EOF
level=info timestamp=2018-08-07T19:37:27.848301Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:37:36 http: TLS handshake error from 10.129.0.1:51472: EOF
2018/08/07 19:37:46 http: TLS handshake error from 10.129.0.1:51478: EOF
2018/08/07 19:37:56 http: TLS handshake error from 10.129.0.1:51484: EOF
level=info timestamp=2018-08-07T19:37:57.812663Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:38:06 http: TLS handshake error from 10.129.0.1:51490: EOF
2018/08/07 19:38:16 http: TLS handshake error from 10.129.0.1:51496: EOF
2018/08/07 19:38:26 http: TLS handshake error from 10.129.0.1:51502: EOF
level=info timestamp=2018-08-07T19:38:27.823818Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:38:36 http: TLS handshake error from 10.129.0.1:51508: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
2018/08/07 19:37:46 http: TLS handshake error from 10.128.0.1:53464: EOF
level=info timestamp=2018-08-07T19:37:53.167091Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:37:56 http: TLS handshake error from 10.128.0.1:53506: EOF
level=info timestamp=2018-08-07T19:38:01.933076Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:38:03.365599Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:38:06 http: TLS handshake error from 10.128.0.1:53552: EOF
level=info timestamp=2018-08-07T19:38:10.582842Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0
statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:38:10.600338Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:38:13.557723Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:38:16 http: TLS handshake error from 10.128.0.1:53594: EOF level=info timestamp=2018-08-07T19:38:23.749558Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:38:26 http: TLS handshake error from 10.128.0.1:53636: EOF level=info timestamp=2018-08-07T19:38:32.143087Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:38:33.967777Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:38:36 http: TLS handshake error from 10.128.0.1:53682: EOF Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:26:34.891076Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv kind= uid=cbc557c5-9a77-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:29:35.618989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cbc557c5-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv" level=info timestamp=2018-08-07T19:29:35.639939Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cbc557c5-9a77-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8rw86z778dx9cpgc5tkklz7q2v2b6w62p2j9m9bk8msd7p697ztfvcb5k7l2rv" level=info timestamp=2018-08-07T19:29:35.885463Z pos=preset.go:142 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74 kind= uid=37a6e015-9a78-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:29:35.886192Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74 kind= uid=37a6e015-9a78-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:29:36.074501Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" level=info timestamp=2018-08-07T19:29:36.155069Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" level=info timestamp=2018-08-07T19:32:36.692150Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 37a6e015-9a78-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi87jmtdftgncrljbs6vthgdhqd629gkqn667x9bxgkp6n94jgtlpgbx6skh86w74" level=info timestamp=2018-08-07T19:32:36.889829Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl kind= uid=a38b1a2c-9a78-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:32:36.890461Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl kind= uid=a38b1a2c-9a78-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:32:37.050830Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl" level=info timestamp=2018-08-07T19:35:37.591376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io 
\"testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: a38b1a2c-9a78-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw7h9gphjb7d4t6qlwst2w6bmnvdg8hk89xbpfkfrktfpd4gs9nj2ghbsrhfttgl" level=info timestamp=2018-08-07T19:35:37.805537Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwl596pnvt4hbhmwmf4l47wk4l42 kind= uid=0f6025a6-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:35:37.806360Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwl596pnvt4hbhmwmf4l47wk4l42 kind= uid=0f6025a6-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:35:37.993504Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwl596pnvt4hbhmwmf4l47wk4l42\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwl596pnvt4hbhmwmf4l47wk4l42" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n" level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
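(One more recurring detail: each virt-launcher dump logs "Watchdog file created at /var/run/kubevirt/watchdog-files/..." followed by "Marked as ready", even while the pod phase is still Pending. The watchdog file acts as a heartbeat: the launcher keeps the file fresh, and virt-handler treats a stale file as a dead launcher. Below is a generic sketch of that heartbeat pattern; the path and interval are illustrative, not KubeVirt's real values. The handler dump resumes after it.)

    package main

    import (
        "os"
        "time"
    )

    // heartbeat creates the watchdog file once, then keeps bumping its
    // mtime; a monitor that sees the mtime go stale can declare this
    // process dead.
    func heartbeat(path string, interval time.Duration) error {
        f, err := os.Create(path)
        if err != nil {
            return err
        }
        f.Close()
        for range time.Tick(interval) {
            now := time.Now()
            if err := os.Chtimes(path, now, now); err != nil { // "touch"
                return err
            }
        }
        return nil
    }

    func main() {
        _ = heartbeat("/tmp/demo-watchdog", 5*time.Second)
    }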
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwxstd4 Pod phase: Pending level=info timestamp=2018-08-07T19:35:42.125979Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T19:35:42.126305Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T19:35:42.128186Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T19:35:53.086926Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T19:35:53.129668Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwl596pnvt4hbhmwmf4l47wk4l42" level=info timestamp=2018-08-07T19:35:53.130532Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T19:35:53.130721Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [180.921 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Killed VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:997 should be left alone by virt-handler [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1025 Timed out after 90.012s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ STEP: Starting a VirtualMachineInstance level=info timestamp=2018-08-07T19:35:38.029693Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwl596pnvt4hbhmwmf4l47wk4l42 kind=VirtualMachineInstance uid=0f6025a6-9a79-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi4mfrvp4mq5xgtzf9x755m4xpwk7frtsrtbrgwxstd4" • ------------------------------ • [SLOW TEST:7.937 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running 2018/08/07 19:38:46 http: TLS handshake error from 10.129.0.1:51514: EOF 2018/08/07 19:38:56 http: TLS handshake error from 10.129.0.1:51522: EOF level=info timestamp=2018-08-07T19:38:57.820071Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 19:39:06 http: TLS handshake error from 10.129.0.1:51528: EOF 2018/08/07 19:39:16 http: TLS handshake error from 10.129.0.1:51534: EOF 2018/08/07 19:39:26 http: TLS handshake error from 10.129.0.1:51540: EOF 2018/08/07 19:39:36 http: TLS handshake error from 10.129.0.1:51546: EOF 2018/08/07 19:39:46 http: TLS handshake error from 10.129.0.1:51552: EOF 2018/08/07 19:39:56 http: TLS handshake error from 10.129.0.1:51558: EOF 2018/08/07 19:40:06 http: TLS handshake error from 10.129.0.1:51564: EOF 2018/08/07 19:40:16 http: TLS handshake error from 10.129.0.1:51570: EOF 2018/08/07 19:40:26 http: TLS handshake error from 10.129.0.1:51576: EOF level=info timestamp=2018-08-07T19:40:27.791185Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 19:40:36 http: TLS handshake error from 10.129.0.1:51582: EOF 2018/08/07 19:40:46 http: TLS handshake error from 10.129.0.1:51588: EOF Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T19:40:06.036366Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:40:06 http: TLS handshake error from 10.128.0.1:54100: EOF level=info timestamp=2018-08-07T19:40:12.046617Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:40:12.049857Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:40:16.239624Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
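The failure above (and the two HookSidecars timeouts further down) all trip the same 90s poll at tests/utils.go:1101, which keeps comparing the VMI phase against Running and, on timeout, prints the boolean diff "Expected : false to equal : true". A minimal Gomega sketch of what such a wait typically looks like; the helper name and client wiring here are assumptions, not the actual tests/utils.go code:

    import (
        "time"

        . "github.com/onsi/gomega"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        v1 "kubevirt.io/kubevirt/pkg/api/v1"
        "kubevirt.io/kubevirt/pkg/kubecli"
    )

    // Sketch only: poll the VMI until it reports phase Running; if the
    // launcher pod never leaves Pending (as in this run), the closure keeps
    // returning false and Gomega fails with the diff quoted in the log.
    func waitForVMIRunning(client kubecli.KubevirtClient, vmi *v1.VirtualMachineInstance) {
        Eventually(func() bool {
            current, err := client.VirtualMachineInstance(vmi.Namespace).Get(vmi.Name, &metav1.GetOptions{})
            Expect(err).ToNot(HaveOccurred())
            return current.Status.Phase == v1.Running
        }, 90*time.Second, 1*time.Second).Should(Equal(true), "Timed out waiting for VMI to enter Running phase")
    }

Note that in every dump below the virt-launcher pods stay in phase Pending even after logging "Marked as ready", so a poll of this shape never flips to true.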
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:40:16 http: TLS handshake error from 10.128.0.1:54144: EOF level=info timestamp=2018-08-07T19:40:26.441263Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:40:26 http: TLS handshake error from 10.128.0.1:54186: EOF level=info timestamp=2018-08-07T19:40:32.996591Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:40:36 http: TLS handshake error from 10.128.0.1:54234: EOF level=info timestamp=2018-08-07T19:40:36.664180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:40:42.322957Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:40:42.332797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:40:46 http: TLS handshake error from 10.128.0.1:54276: EOF level=info timestamp=2018-08-07T19:40:46.853948Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:38:51.034685Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5twtzmv57hgzjl2sptkd9xczm6n8bl888gwfplc5qntfqvxjtfvxwmmw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5twtzmv57hgzjl2sptkd9xczm6n8bl888gwfplc5qntfqvxjtfvxwmmw" level=info timestamp=2018-08-07T19:38:51.306878Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5twtzmv57hgzjl2sptkd9xczm6n8bl888gwfplc5qntfqvxjtfvxwmmw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5twtzmv57hgzjl2sptkd9xczm6n8bl888gwfplc5qntfqvxjtfvxwmmw" level=info timestamp=2018-08-07T19:38:51.457861Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5twtzmv57hgzjl2sptkd9xczm6n8bl888gwfplc5qntfqvxjtfvxwmmw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi5twtzmv57hgzjl2sptkd9xczm6n8bl888gwfplc5qntfqvxjtfvxwmmw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in 
precondition: 7fec0b3f-9a79-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5twtzmv57hgzjl2sptkd9xczm6n8bl888gwfplc5qntfqvxjtfvxwmmw" level=error timestamp=2018-08-07T19:38:52.924979Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m kind= uid=83a6492d-9a79-11e8-b575-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m" level=info timestamp=2018-08-07T19:38:52.925352Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m kind= uid=83a6492d-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:38:52.925607Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m kind= uid=83a6492d-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=error timestamp=2018-08-07T19:38:52.949576Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs kind= uid=83a6eb96-9a79-11e8-b575-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs" level=info timestamp=2018-08-07T19:38:52.949862Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs kind= uid=83a6eb96-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:38:52.949997Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs kind= uid=83a6eb96-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=error timestamp=2018-08-07T19:38:53.138893Z pos=replicaset.go:230 component=virt-controller service=http namespace=kubevirt-test-default name=replicasetn92xq kind= uid=839c0424-9a79-11e8-b575-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetn92xq\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the replicaset status failed." 
level=info timestamp=2018-08-07T19:38:53.140068Z pos=replicaset.go:137 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetn92xq\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstanceReplicaSet kubevirt-test-default/replicasetn92xq" level=info timestamp=2018-08-07T19:38:53.220646Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m" level=info timestamp=2018-08-07T19:38:53.837154Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs" level=info timestamp=2018-08-07T19:38:54.032551Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m" level=info timestamp=2018-08-07T19:38:54.411105Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." 
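The bursts of "Operation cannot be fulfilled ... the object has been modified" above are ordinary optimistic-concurrency conflicts: an update was sent with a stale resourceVersion, the apiserver rejected it, and the controller re-enqueued the key to retry against the latest copy. Outside a controller work queue, the stock client-go answer is retry.RetryOnConflict; a minimal sketch under assumed kubecli wiring (the annotation mutated here is purely illustrative):

    import (
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/util/retry"
        "kubevirt.io/kubevirt/pkg/kubecli"
    )

    // Sketch only: re-read the freshest copy and retry the update on
    // conflict, which is what the controller's re-enqueue achieves.
    func markInitialized(client kubecli.KubevirtClient, namespace, name string) error {
        return retry.RetryOnConflict(retry.DefaultRetry, func() error {
            vmi, err := client.VirtualMachineInstance(namespace).Get(name, &metav1.GetOptions{})
            if err != nil {
                return err
            }
            if vmi.Annotations == nil {
                vmi.Annotations = map[string]string{}
            }
            vmi.Annotations["example.kubevirt.io/initialized"] = "true" // hypothetical mutation
            _, err = client.VirtualMachineInstance(namespace).Update(vmi)
            return err
        })
    }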
Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbkk7bd
Pod phase: Pending
level=info timestamp=2018-08-07T19:38:57.985012Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:38:57.985619Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:38:57.989090Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:39:09.630073Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:39:09.722102Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxxxvjs"
level=info timestamp=2018-08-07T19:39:09.727709Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:39:09.728247Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbkq27p
Pod phase: Pending
level=info timestamp=2018-08-07T19:38:57.313085Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:38:57.313352Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:38:57.315353Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:39:07.325847Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:39:07.420359Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimc95qzg6jsm6ldl2pgcbfcwh56sbzn95v2rnbdx6lz977ps2pxxf2j9m"
level=info timestamp=2018-08-07T19:39:07.426727Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:39:07.427945Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
------------------------------
• Failure [121.831 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should update readyReplicas once VMIs are up [It]
  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157

  Timed out after 120.000s.
  Expected
    : 0
  to equal
    : 2

  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:166
------------------------------
STEP: Create a new VirtualMachineInstance replica set
STEP: Scaling to 2
STEP: Checking the number of replicas
STEP: Checking the number of ready replicas
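Unlike the phase timeouts, this failure at replicaset_test.go:166 compares integers: status.readyReplicas never moved off 0 within 120s because both launcher pods sat in Pending. The check is presumably a poll of the replica set's status, along these lines (same imports as the earlier sketch; the client shape is an assumption):

    // Sketch only: poll status.readyReplicas on the
    // VirtualMachineInstanceReplicaSet until it reaches the expected count.
    Eventually(func() int32 {
        rs, err := client.ReplicaSet(namespace).Get(name, metav1.GetOptions{})
        Expect(err).ToNot(HaveOccurred())
        return rs.Status.ReadyReplicas
    }, 120*time.Second, 1*time.Second).Should(Equal(int32(2)))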
• [SLOW TEST:5.761 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should remove VMIs once it is marked for deletion
  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:169
------------------------------
•
------------------------------
• [SLOW TEST:5.570 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should not scale when paused and scale when resume
  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223
------------------------------
•

Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:41:56 http: TLS handshake error from 10.129.0.1:51632: EOF
2018/08/07 19:42:06 http: TLS handshake error from 10.129.0.1:51638: EOF
2018/08/07 19:42:16 http: TLS handshake error from 10.129.0.1:51644: EOF
2018/08/07 19:42:26 http: TLS handshake error from 10.129.0.1:51650: EOF
2018/08/07 19:42:36 http: TLS handshake error from 10.129.0.1:51656: EOF
2018/08/07 19:42:46 http: TLS handshake error from 10.129.0.1:51662: EOF
2018/08/07 19:42:56 http: TLS handshake error from 10.129.0.1:51668: EOF
2018/08/07 19:43:06 http: TLS handshake error from 10.129.0.1:51674: EOF
2018/08/07 19:43:16 http: TLS handshake error from 10.129.0.1:51680: EOF
2018/08/07 19:43:26 http: TLS handshake error from 10.129.0.1:51686: EOF
level=info timestamp=2018-08-07T19:43:27.844934Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:43:36 http: TLS handshake error from 10.129.0.1:51692: EOF
2018/08/07 19:43:46 http: TLS handshake error from 10.129.0.1:51698: EOF
2018/08/07 19:43:56 http: TLS handshake error from 10.129.0.1:51704: EOF
2018/08/07 19:44:06 http: TLS handshake error from 10.129.0.1:51710: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
2018/08/07 19:43:26 http: TLS handshake error from 10.128.0.1:54974: EOF
level=info timestamp=2018-08-07T19:43:30.119931Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:43:34.231567Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:43:36 http: TLS handshake error from 10.128.0.1:55020: EOF
level=info timestamp=2018-08-07T19:43:40.303217Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:43:44.218775Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:43:44.219051Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:43:46 http: TLS handshake error from 10.128.0.1:55062: EOF
level=info timestamp=2018-08-07T19:43:50.489307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:43:56 http: TLS handshake error from 10.128.0.1:55104: EOF
level=info timestamp=2018-08-07T19:43:57.526924Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:44:00.700687Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:44:04.449524Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:44:06 http: TLS handshake error from 10.128.0.1:55150: EOF
level=info timestamp=2018-08-07T19:44:10.897678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:41:08.915654Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxcc58g kind= uid=d4ab7956-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:41:08.915806Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxcc58g kind= uid=d4ab7956-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=error timestamp=2018-08-07T19:41:08.919298Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxbbbnl kind= uid=d4adb41b-9a79-11e8-b575-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxbbbnl"
level=info timestamp=2018-08-07T19:41:08.920064Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxbbbnl kind= uid=d4adb41b-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:41:08.920479Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxbbbnl kind= uid=d4adb41b-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=error timestamp=2018-08-07T19:41:09.229840Z pos=replicaset.go:230 component=virt-controller service=http namespace=kubevirt-test-default name=replicasetd7mn8 kind= uid=d4a231f7-9a79-11e8-b575-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetd7mn8\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the replicaset status failed."
level=info timestamp=2018-08-07T19:41:09.230085Z pos=replicaset.go:137 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetd7mn8\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstanceReplicaSet kubevirt-test-default/replicasetd7mn8"
level=info timestamp=2018-08-07T19:41:09.816033Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxcc58g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxcc58g"
level=error timestamp=2018-08-07T19:41:10.628815Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm"
level=info timestamp=2018-08-07T19:41:10.628945Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:41:10.629010Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:41:11.611539Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm"
level=info timestamp=2018-08-07T19:41:12.234183Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm"
level=info timestamp=2018-08-07T19:41:13.451465Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:41:13.452753Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hgzrrtg
Pod phase: Pending
level=info timestamp=2018-08-07T19:41:19.440621Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:41:19.441090Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc42041c200]]"
level=info timestamp=2018-08-07T19:41:19.444945Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:41:30.000822Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:41:30.113604Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp"
level=info timestamp=2018-08-07T19:41:30.116500Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:41:30.117073Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
------------------------------
• Failure [180.942 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should successfully start with hook sidecar annotation [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60

      Timed out after 90.013s.
      Timed out waiting for VMI to enter Running phase
      Expected
        : false
      to equal
        : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Starting a VMI
level=info timestamp=2018-08-07T19:41:13.664685Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind=VirtualMachineInstance uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hgzrrtg"
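Both HookSidecars failures exercise the hook mechanism visible in the launcher log above ("Sorted all collected sidecar sockets per hook point ... map[OnDefineDomain:[...]]"). A VMI opts into a hook sidecar through the hooks.kubevirt.io/hookSidecars annotation, which carries a JSON list of sidecar images. A sketch of setting it; the helper name and image reference are illustrative, not what this job used:

    import v1 "kubevirt.io/kubevirt/pkg/api/v1"

    // Sketch only: request a hook sidecar via annotation. virt-launcher then
    // collects the sidecar's socket and calls it at the OnDefineDomain point.
    func withHookSidecar(vmi *v1.VirtualMachineInstance) {
        if vmi.ObjectMeta.Annotations == nil {
            vmi.ObjectMeta.Annotations = map[string]string{}
        }
        vmi.ObjectMeta.Annotations["hooks.kubevirt.io/hookSidecars"] =
            `[{"image": "registry:5000/kubevirt/example-hook-sidecar:latest"}]`
    }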
Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
level=info timestamp=2018-08-07T19:45:27.991172Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:45:36 http: TLS handshake error from 10.129.0.1:51766: EOF
2018/08/07 19:45:46 http: TLS handshake error from 10.129.0.1:51772: EOF
2018/08/07 19:45:56 http: TLS handshake error from 10.129.0.1:51778: EOF
level=info timestamp=2018-08-07T19:45:57.603196Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:45:57.874052Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:46:06 http: TLS handshake error from 10.129.0.1:51784: EOF
2018/08/07 19:46:16 http: TLS handshake error from 10.129.0.1:51790: EOF
2018/08/07 19:46:26 http: TLS handshake error from 10.129.0.1:51796: EOF
level=info timestamp=2018-08-07T19:46:27.872680Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:46:36 http: TLS handshake error from 10.129.0.1:51802: EOF
2018/08/07 19:46:46 http: TLS handshake error from 10.129.0.1:51808: EOF
2018/08/07 19:46:56 http: TLS handshake error from 10.129.0.1:51814: EOF
level=info timestamp=2018-08-07T19:46:57.904378Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:47:06 http: TLS handshake error from 10.129.0.1:51820: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:46:19.837467Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-07T19:46:23.468223Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:46:26 http: TLS handshake error from 10.128.0.1:55796: EOF
level=info timestamp=2018-08-07T19:46:33.637261Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:46:35.500344Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:46:36 http: TLS handshake error from 10.128.0.1:55842: EOF
level=info timestamp=2018-08-07T19:46:43.822054Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:46:45.957045Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:46:45.973166Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:46:46 http: TLS handshake error from 10.128.0.1:55884: EOF
level=info timestamp=2018-08-07T19:46:54.016614Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:46:56 http: TLS handshake error from 10.128.0.1:55926: EOF
level=info timestamp=2018-08-07T19:47:04.218677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:47:05.736028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:47:06 http: TLS handshake error from 10.128.0.1:55972: EOF

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=error timestamp=2018-08-07T19:41:09.229840Z pos=replicaset.go:230 component=virt-controller service=http namespace=kubevirt-test-default name=replicasetd7mn8 kind= uid=d4a231f7-9a79-11e8-b575-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetd7mn8\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the replicaset status failed."
level=info timestamp=2018-08-07T19:41:09.230085Z pos=replicaset.go:137 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetd7mn8\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstanceReplicaSet kubevirt-test-default/replicasetd7mn8"
level=info timestamp=2018-08-07T19:41:09.816033Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxcc58g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbxcc58g"
level=error timestamp=2018-08-07T19:41:10.628815Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm"
level=info timestamp=2018-08-07T19:41:10.628945Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:41:10.629010Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:41:11.611539Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm"
level=info timestamp=2018-08-07T19:41:12.234183Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm"
level=info timestamp=2018-08-07T19:41:13.451465Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:41:13.452753Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:44:14.043157Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d76ddd3b-9a79-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp"
level=info timestamp=2018-08-07T19:44:14.251245Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:44:14.254650Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:44:14.422756Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4"
level=info timestamp=2018-08-07T19:44:14.480626Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz889xh6k
Pod phase: Pending
level=info timestamp=2018-08-07T19:44:19.685477Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:44:19.686591Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc420356a00]]"
level=info timestamp=2018-08-07T19:44:19.693122Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:44:30.881959Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:44:30.938664Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4"
level=info timestamp=2018-08-07T19:44:30.942127Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:44:30.943090Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.820 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should call Collect and OnDefineDomain on the hook sidecar [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67

      Timed out after 90.015s.
      Timed out waiting for VMI to enter Running phase
      Expected
        : false
      to equal
        : true

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Getting hook-sidecar logs
level=info timestamp=2018-08-07T19:44:14.509842Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind=VirtualMachineInstance uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz889xh6k"
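The recurring "StorageError: invalid object ... Precondition failed: UID in precondition: ..., UID in object meta: " entries (19:44:14 above, 19:47:14 below) appear to be a request racing test teardown: it carried the UID of the VMI the controller last observed, but that exact object was already gone from etcd, so the precondition failed and the key was simply re-enqueued. The precondition itself is standard apimachinery; a sketch, with a hypothetical helper name:

    import (
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/types"
    )

    // Sketch only: delete options guarded by the observed UID. If the object
    // was deleted (and possibly recreated) in the meantime, the apiserver
    // answers with the "Precondition failed: UID in precondition" error
    // quoted in this log, rather than deleting the wrong object.
    func deleteOptionsForUID(uid types.UID) *metav1.DeleteOptions {
        return &metav1.DeleteOptions{
            Preconditions: &metav1.Preconditions{UID: &uid},
        }
    }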
timestamp=2018-08-07T19:49:47.824561Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:49:47.878512Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:49:47.907633Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:49:56 http: TLS handshake error from 10.128.0.1:56734: EOF level=info timestamp=2018-08-07T19:49:58.058389Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/07 19:50:06 http: TLS handshake error from 10.128.0.1:56780: EOF level=info timestamp=2018-08-07T19:50:07.115869Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-07T19:50:08.305940Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:41:10.628945Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:41:10.629010Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm kind= uid=d5c0dc8c-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:41:11.611539Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm" level=info timestamp=2018-08-07T19:41:12.234183Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm" level=info timestamp=2018-08-07T19:41:13.451465Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:41:13.452753Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:44:14.043157Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d76ddd3b-9a79-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp" level=info timestamp=2018-08-07T19:44:14.251245Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:44:14.254650Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:44:14.422756Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:44:14.480626Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:47:14.857146Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 43337e79-9a7a-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:47:15.054202Z 
pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:47:15.054846Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:47:15.241931Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." 
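The virt-controller dump above repeatedly logs "the object has been modified; please apply your changes to the latest version and try again" and then reenqueues the VMI. That is the standard Kubernetes optimistic-concurrency conflict: a write was attempted against a stale resourceVersion. A minimal sketch of the usual client-go remedy, re-fetching the object before each retry; the annotation update and the Pods client here are stand-ins for illustration, not KubeVirt's actual controller code:

```go
package example

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/util/retry"
)

// updateAnnotation retries on HTTP 409 Conflict by re-reading the latest
// object before each attempt, instead of reenqueuing the whole work item.
func updateAnnotation(cs *kubernetes.Clientset, ns, name string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		// Fetch the freshest copy so the update carries a current resourceVersion.
		pod, err := cs.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		if pod.Annotations == nil {
			pod.Annotations = map[string]string{}
		}
		pod.Annotations["example.com/touched"] = "true"
		_, err = cs.CoreV1().Pods(ns).Update(context.TODO(), pod, metav1.UpdateOptions{})
		return err // a Conflict error here triggers another round
	})
}
```

A controller built on informers, as seen here, typically takes the other route: requeue the key and let the watch deliver the newer object, which is why the log reads "reenqueuing VirtualMachineInstance" rather than showing an in-place retry.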
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n" level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5hj2lr5 Pod phase: Pending level=info timestamp=2018-08-07T19:47:20.693290Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T19:47:20.698123Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc420494840]]" level=info timestamp=2018-08-07T19:47:20.710098Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T19:47:31.920381Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T19:47:31.979753Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x" level=info timestamp=2018-08-07T19:47:31.984882Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T19:47:31.985559Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [180.812 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should update domain XML with SM BIOS properties [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83 Timed out after 90.011s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 ------------------------------ STEP: Reading domain XML using virsh level=info timestamp=2018-08-07T19:47:15.338356Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind=VirtualMachineInstance uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5hj2lr5" Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running level=error timestamp=2018-08-07T19:50:22.033077Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:22.033702Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/08/07 19:50:26 http: TLS handshake error from 10.129.0.1:51942: EOF level=error timestamp=2018-08-07T19:50:27.678541Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:27.679036Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=info timestamp=2018-08-07T19:50:27.834223Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/07 19:50:36 http: TLS handshake error from 10.129.0.1:51948: EOF level=error timestamp=2018-08-07T19:50:37.905952Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:37.906304Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:50:40.159771Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." 
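Both HookSidecars failures above end in the same assertion from utils.go:1101: the test waits up to 90 seconds for the VMI to reach the Running phase and then reports "Timed out waiting for VMI to enter Running phase / Expected false to equal true". A plausible shape for such a wait helper, sketched with Gomega's Eventually; getPhase is a hypothetical stand-in for a client lookup, not KubeVirt's actual utils.go code:

```go
package tests

import (
	"time"

	. "github.com/onsi/gomega"
)

// waitForPhase polls getPhase until it reports the wanted phase. If the VMI
// never leaves Scheduling, Eventually fails with the shape seen in the log
// above: the description line, then "Expected false to equal true".
func waitForPhase(getPhase func() string, want string) {
	EventuallyWithOffset(1, func() bool {
		return getPhase() == want
	}, 90*time.Second, 1*time.Second).Should(Equal(true),
		"Timed out waiting for VMI to enter %s phase", want)
}
```

Note the Pod phase in the dumps stays Pending and virt-launcher only reaches "Marked as ready", so the VMI never left Scheduling before the 90s deadline.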
level=info timestamp=2018-08-07T19:50:40.160066Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:50:41.338713Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:41.339016Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:50:44.743339Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:44.743632Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=error timestamp=2018-08-07T19:50:35.320541Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:35.320859Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:50:36.451017Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:36.451259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/08/07 19:50:36 http: TLS handshake error from 10.128.0.1:56952: EOF level=info timestamp=2018-08-07T19:50:37.321713Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=error timestamp=2018-08-07T19:50:38.690472Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." 
level=info timestamp=2018-08-07T19:50:38.690718Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=info timestamp=2018-08-07T19:50:38.972820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=error timestamp=2018-08-07T19:50:42.136548Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:42.137195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:50:43.280118Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:50:43.280637Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:50:45.614721Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." 
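The virt-api entries above answer every /console subresource request with HTTP 400 and the reason "phase is Scheduling instead of Running": the API refuses to open a console until the VMI is actually running. A minimal sketch of that guard pattern as a plain HTTP handler; vmiPhase is a hypothetical stand-in for the subresource server's real lookup:

```go
package example

import (
	"fmt"
	"net/http"
)

// consoleHandler rejects console connections for VMIs that are not Running,
// mirroring the 400s logged by subresource.go above.
func consoleHandler(vmiPhase func(namespace, name string) string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ns, name := r.URL.Query().Get("namespace"), r.URL.Query().Get("name")
		if phase := vmiPhase(ns, name); phase != "Running" {
			msg := fmt.Sprintf("Unable to connect to VirtualMachineInstance because phase is %s instead of Running", phase)
			http.Error(w, msg, http.StatusBadRequest) // shows up as statusCode=400 in the access log
			return
		}
		// ...upgrade the connection and proxy the serial console stream...
	}
}
```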
level=info timestamp=2018-08-07T19:50:45.615253Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4/console proto=HTTP/1.1 statusCode=400 contentLength=90 Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:41:12.234183Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi92cgtjr7wfn9rsdqg9ptx4qvdxvmx45dwj4j95b2l9m48645pbx886hm" level=info timestamp=2018-08-07T19:41:13.451465Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:41:13.452753Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp kind= uid=d76ddd3b-9a79-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:44:14.043157Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d76ddd3b-9a79-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikw2mt86n65jg2frnjh4b4hnwvggjkw7s2q7hghsnq528fvjq9tpgxrd8wc8jshp" level=info timestamp=2018-08-07T19:44:14.251245Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:44:14.254650Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:44:14.422756Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info 
timestamp=2018-08-07T19:44:14.480626Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:47:14.857146Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 43337e79-9a7a-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:47:15.054202Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:47:15.054846Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:47:15.241931Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x" level=info timestamp=2018-08-07T19:50:15.950046Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:50:15.951021Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:50:16.174204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler 
namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n" level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." 
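The virt-handler vm.go records above narrate its sync loop: for each VMI it logs whether the object still exists, what phase it is in, and whether a matching libvirt domain exists, then picks a cleanup or sync action. A compact sketch of that decision table under assumed names; only the cleanup branch is taken from the log, the others are illustrative:

```go
package example

import "log"

// syncVMI mirrors the branching visible in the vm.go log lines above:
// a Failed or deleted VMI with no domain left only needs ephemeral-data cleanup.
func syncVMI(vmiExists bool, phase string, domainExists bool) {
	log.Printf("Processing vmi, existing: %v", vmiExists)
	log.Printf("vmi is in phase: %s", phase)
	log.Printf("Domain: existing: %v", domainExists)

	switch {
	case !domainExists && (!vmiExists || phase == "Failed" || phase == "Succeeded"):
		log.Print("Processing local ephemeral data cleanup for shutdown domain.")
	case vmiExists && !domainExists:
		log.Print("starting domain for pending VMI") // illustrative branch
	default:
		log.Print("reconciling running domain") // illustrative branch
	}
	log.Print("Synchronization loop succeeded.")
}
```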
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mkcx6w Pod phase: Pending level=info timestamp=2018-08-07T19:50:20.704927Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T19:50:20.705191Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T19:50:20.707500Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T19:50:31.226886Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T19:50:31.314093Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4" level=info timestamp=2018-08-07T19:50:31.317146Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T19:50:31.317719Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [30.843 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a cirros image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67 should return that we are running cirros [It] /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68 Expected error: <*errors.errorString | 0xc4203eca00>: { s: "Timeout trying to connect to the virtual machine instance", } Timeout trying to connect to the virtual machine instance not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:55 ------------------------------ STEP: Creating a new VirtualMachineInstance STEP: Expecting the VirtualMachineInstance console Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running level=info timestamp=2018-08-07T19:51:05.322833Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/08/07 19:51:06 http: TLS handshake error from 10.129.0.1:51968: EOF level=error timestamp=2018-08-07T19:51:07.563605Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." 
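The virt-launcher entries above ("Collected all requested hook sidecar sockets", "Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[...]]") describe how hook sidecars register: each sidecar exposes a socket, and virt-launcher groups the resulting callbacks per hook point, ordered by priority and then name. A rough sketch of that grouping step under assumed types, not KubeVirt's actual hooks manager:

```go
package main

import (
	"fmt"
	"sort"
)

type callback struct {
	Name     string
	Priority int
	Socket   string
}

// sortByHookPoint orders each hook point's callbacks (e.g. "OnDefineDomain")
// by descending priority, then by name, which is the ordering the
// manager.go:72 message above reports.
func sortByHookPoint(subscribed map[string][]callback) map[string][]callback {
	for point, cbs := range subscribed {
		sort.Slice(cbs, func(i, j int) bool {
			if cbs[i].Priority != cbs[j].Priority {
				return cbs[i].Priority > cbs[j].Priority
			}
			return cbs[i].Name < cbs[j].Name
		})
		subscribed[point] = cbs
	}
	return subscribed
}

func main() {
	m := sortByHookPoint(map[string][]callback{
		"OnDefineDomain": {{Name: "smbios", Priority: 1, Socket: "/var/run/kubevirt-hooks/smbios.sock"}},
	})
	fmt.Println(m)
}
```

The empty map[] in the console-test launchers above simply means those VMIs requested no hook sidecars.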
level=info timestamp=2018-08-07T19:51:07.564071Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:09.897568Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:09.897831Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:11.020723Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:11.020919Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:12.176820Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:12.177070Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:13.269902Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:13.270369Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/08/07 19:51:16 http: TLS handshake error from 10.129.0.1:51974: EOF level=error timestamp=2018-08-07T19:51:16.676331Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." 
level=info timestamp=2018-08-07T19:51:16.676861Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 Pod name: virt-api-7d79764579-2x6zh Pod phase: Running level=info timestamp=2018-08-07T19:50:59.493343Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=error timestamp=2018-08-07T19:51:02.771231Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:02.771673Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:06.113559Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:06.113880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/08/07 19:51:06 http: TLS handshake error from 10.128.0.1:57136: EOF level=info timestamp=2018-08-07T19:51:07.525937Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=error timestamp=2018-08-07T19:51:08.354220Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:08.354529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=info timestamp=2018-08-07T19:51:09.769840Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=error timestamp=2018-08-07T19:51:14.097493Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." 
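The Console test failures report "Timeout trying to connect to the virtual machine instance": the test keeps asking virt-api for a console stream while the VMI is still Scheduling (every attempt returns the 400 seen above) and eventually gives up. A generic sketch of that retry-until-deadline client loop; tryConsole is a hypothetical stand-in for the real console dial:

```go
package example

import (
	"errors"
	"time"
)

// connectConsole retries tryConsole until it succeeds or the deadline passes,
// returning the same error string the console tests above fail with.
func connectConsole(tryConsole func() error, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		// While the VMI is Scheduling, each attempt surfaces the
		// HTTP 400 "phase is Scheduling instead of Running" as err.
		if err := tryConsole(); err == nil {
			return nil
		}
		time.Sleep(time.Second)
	}
	return errors.New("Timeout trying to connect to the virtual machine instance")
}
```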
level=info timestamp=2018-08-07T19:51:14.097892Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:15.216118Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:15.216360Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/08/07 19:51:16 http: TLS handshake error from 10.128.0.1:57198: EOF Pod name: virt-controller-7d57d96b65-kch7q Pod phase: Running level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-m6vfj Pod phase: Running level=info timestamp=2018-08-07T19:44:14.251245Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:44:14.254650Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4 kind= uid=43337e79-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:44:14.422756Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:44:14.480626Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:47:14.857146Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 43337e79-9a7a-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4" level=info timestamp=2018-08-07T19:47:15.054202Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:47:15.054846Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:47:15.241931Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x" level=info timestamp=2018-08-07T19:50:15.950046Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:50:15.951021Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-07T19:50:16.174204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4" level=info timestamp=2018-08-07T19:50:46.526760Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ac7afac-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4" level=info timestamp=2018-08-07T19:50:46.546896Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 
1ac7afac-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4" level=info timestamp=2018-08-07T19:50:46.865006Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz kind= uid=2d369135-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-07T19:50:46.865524Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz kind= uid=2d369135-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-x58fz Pod phase: Running level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n" level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded." 
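The StorageError lines above ("Precondition failed: UID in precondition: 1ac7afac-..., UID in object meta: ") most likely mean a write carrying a UID precondition raced with deletion: the stored object was already gone, so the precondition compared against an empty UID and the controller simply reenqueued. The same mechanism can be used deliberately, passing the observed UID in DeleteOptions so a delete only applies to the exact object instance you saw. A sketch with the dynamic client; the GVR matches the v1alpha2 API in this log, the function name is illustrative:

```go
package example

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/client-go/dynamic"
)

// deleteExactVMI deletes the VMI only if its UID still matches observedUID,
// guarding against deleting a recreated object with the same name.
func deleteExactVMI(dc dynamic.Interface, ns, name, observedUID string) error {
	gvr := schema.GroupVersionResource{
		Group:    "kubevirt.io",
		Version:  "v1alpha2",
		Resource: "virtualmachineinstances",
	}
	return dc.Resource(gvr).Namespace(ns).Delete(context.TODO(), name, metav1.DeleteOptions{
		// If the live object's UID differs, or the object is gone, the API
		// server rejects this with the same "Precondition failed: UID in
		// precondition" error class seen in the log above.
		Preconditions: metav1.NewUIDPreconditions(observedUID),
	})
}
```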
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n" level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-xnssb Pod phase: Running level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm2sxtm Pod phase: Pending level=info timestamp=2018-08-07T19:50:51.731116Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-07T19:50:51.732633Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-07T19:50:51.743030Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-07T19:51:02.916199Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-07T19:51:02.949368Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz" level=info timestamp=2018-08-07T19:51:02.950883Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-07T19:51:02.951214Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [30.908 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a fedora image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77 should return that we are running fedora [It] /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78 Expected error: <*errors.errorString | 0xc420782eb0>: { s: "Timeout trying to connect to the virtual machine instance", } Timeout trying to connect to the virtual machine instance not to have occurred 
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:55 ------------------------------ STEP: Creating a new VirtualMachineInstance STEP: Expecting the VirtualMachineInstance console Pod name: disks-images-provider-2c85x Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-78krt Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-2n99b Pod phase: Running level=info timestamp=2018-08-07T19:51:26.748051Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:30.187954Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:30.188809Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:31.325118Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:31.325344Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-08-07T19:51:33.586237Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-08-07T19:51:33.586884Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/08/07 19:51:36 http: TLS handshake error from 10.129.0.1:51986: EOF level=error timestamp=2018-08-07T19:51:41.495870Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." 
level=info timestamp=2018-08-07T19:51:41.496480Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:51:44.870150Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:44.870708Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:51:46.026740Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:46.027049Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/07 19:51:46 http: TLS handshake error from 10.129.0.1:51992: EOF

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:51:37.733102Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-07T19:51:37.759345Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:37.759533Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:51:38.905438Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:38.905820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:51:40.026646Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:40.026888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=info timestamp=2018-08-07T19:51:40.323017Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-07T19:51:42.283579Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:42.285620Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:51:43.409922Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:43.410207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/07 19:51:46 http: TLS handshake error from 10.128.0.1:57382: EOF
level=error timestamp=2018-08-07T19:51:46.814428Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:51:46.814650Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57/console proto=HTTP/1.1 statusCode=400 contentLength=90

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:47:14.857146Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 43337e79-9a7a-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pxfzjm8btq94q68wnsbd2z95lpp9zmh5pz88nvq8fjbcwjf47knhr5z478l7r4"
level=info timestamp=2018-08-07T19:47:15.054202Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:47:15.054846Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x kind= uid=aef7a830-9a7a-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:47:15.241931Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x"
level=info timestamp=2018-08-07T19:50:15.950046Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:50:15.951021Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:50:16.174204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4"
level=info timestamp=2018-08-07T19:50:46.526760Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ac7afac-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4"
level=info timestamp=2018-08-07T19:50:46.546896Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ac7afac-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4"
level=info timestamp=2018-08-07T19:50:46.865006Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz kind= uid=2d369135-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:50:46.865524Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz kind= uid=2d369135-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:51:17.423272Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2d369135-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz"
level=info timestamp=2018-08-07T19:51:17.580170Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57 kind= uid=3f8790a6-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:51:17.580922Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57 kind= uid=3f8790a6-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:51:17.727613Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmx8rb5w
Pod phase: Pending
level=info timestamp=2018-08-07T19:51:21.456054Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:51:21.456360Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:51:21.458165Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:51:32.069117Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:51:32.176162Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57"
level=info timestamp=2018-08-07T19:51:32.179886Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:51:32.180968Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [30.670 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should be able to reconnect to console multiple times [It]
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87

      Expected error:
          <*errors.errorString | 0xc420438810>: {
              s: "Timeout trying to connect to the virtual machine instance",
          }
          Timeout trying to connect to the virtual machine instance
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:100
------------------------------
STEP: Creating a new VirtualMachineInstance
STEP: Checking that the console output equals to expected one

Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
level=error timestamp=2018-08-07T19:52:07.915859Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:07.916192Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:10.176368Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:10.176779Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:11.334585Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:11.335291Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:14.703989Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:14.704615Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:15.834205Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:15.834566Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/07 19:52:16 http: TLS handshake error from 10.129.0.1:52010: EOF
level=error timestamp=2018-08-07T19:52:16.957647Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:16.957863Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:18.086884Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:18.087246Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:52:01.941488Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:03.065296Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:03.065508Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:05.319668Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:05.320046Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/07 19:52:06 http: TLS handshake error from 10.128.0.1:57508: EOF
level=info timestamp=2018-08-07T19:52:07.942289Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-07T19:52:08.703662Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:08.703942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=info timestamp=2018-08-07T19:52:11.106649Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-07T19:52:12.110959Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:12.111153Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-07T19:52:13.244426Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-07T19:52:13.246464Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/07 19:52:16 http: TLS handshake error from 10.128.0.1:57568: EOF

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T18:27:09.374110Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-m6vfj
Pod phase: Running
level=info timestamp=2018-08-07T19:47:15.241931Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikwbcjqtlrmsg8f4dvdz6qcp66m8tjhn5m6n5h6lc76dp2slrsvdzxbfwqtbmp4x"
level=info timestamp=2018-08-07T19:50:15.950046Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:50:15.951021Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4 kind= uid=1ac7afac-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:50:16.174204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4"
level=info timestamp=2018-08-07T19:50:46.526760Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ac7afac-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4"
level=info timestamp=2018-08-07T19:50:46.546896Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ac7afac-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqkz5sqwf5x222h2v6t84hjkxlzfnx87bsl2mspz7v4r9djfqgxbnw7z257jrc4"
level=info timestamp=2018-08-07T19:50:46.865006Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz kind= uid=2d369135-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:50:46.865524Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz kind= uid=2d369135-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:51:17.423272Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2d369135-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv5vltwrt7grltx7tcncdhbrqf9xgjwqsgqgzm76r49r6jfn5xw57pt568n7s8tz"
level=info timestamp=2018-08-07T19:51:17.580170Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57 kind= uid=3f8790a6-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:51:17.580922Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57 kind= uid=3f8790a6-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:51:17.727613Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57"
level=info timestamp=2018-08-07T19:51:48.099502Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3f8790a6-9a7b-11e8-b575-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqkg5nrfns95xzgmw8g8jstvlhvknc8nxvdbmxskd688n46cjjxldrjfxd9jxh57"
level=info timestamp=2018-08-07T19:51:48.337759Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c kind= uid=51dc1d6f-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:51:48.338213Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c kind= uid=51dc1d6f-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T18:27:06.212615Z pos=virt-handler.go:87 component=virt-handler hostname=node02
level=info timestamp=2018-08-07T18:27:06.254524Z pos=vm.go:213 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-08-07T18:27:06.256873Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-07T18:27:06.354993Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-07T18:27:06.385761Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-08-07T18:27:06.392559Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"

Pod name: virt-launcher-testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742th8z97k
Pod phase: Pending
level=info timestamp=2018-08-07T19:51:53.241024Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:51:53.241387Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:51:53.243328Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:52:03.585114Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:52:03.671109Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmibmf4wc7jzspv2vh2nb9r5pg29l4rv42s742thmxc57lnjq974sk5fd4wftcjb2c"
level=info timestamp=2018-08-07T19:52:03.674325Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:52:03.675059Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [30.819 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should wait until the virtual machine is in running state and return a stream interface [It]
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103

      Expected error:
          <*errors.errorString | 0xc420af7770>: {
              s: "Timeout trying to connect to the virtual machine instance",
          }
          Timeout trying to connect to the virtual machine instance
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:109
------------------------------
STEP: Creating a new VirtualMachineInstance

• [SLOW TEST:30.374 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should fail waiting for the virtual machine instance to be running
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111
------------------------------

• [SLOW TEST:30.387 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should fail waiting for the expecter
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134
------------------------------

• [SLOW TEST:41.667 seconds]
LeaderElection
/root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43
  Start a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53
    when the controller pod is not running
    /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54
      should success
      /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55
------------------------------

Pod name: disks-images-provider-2c85x
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-78krt
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-2n99b
Pod phase: Running
2018/08/07 19:55:06 http: TLS handshake error from 10.129.0.1:52110: EOF
2018/08/07 19:55:16 http: TLS handshake error from 10.129.0.1:52116: EOF
2018/08/07 19:55:26 http: TLS handshake error from 10.129.0.1:52122: EOF
2018/08/07 19:55:36 http: TLS handshake error from 10.129.0.1:52128: EOF
2018/08/07 19:55:46 http: TLS handshake error from 10.129.0.1:52134: EOF
2018/08/07 19:55:56 http: TLS handshake error from 10.129.0.1:52140: EOF
level=info timestamp=2018-08-07T19:55:57.466642Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:56:06 http: TLS handshake error from 10.129.0.1:52146: EOF
2018/08/07 19:56:16 http: TLS handshake error from 10.129.0.1:52152: EOF
2018/08/07 19:56:26 http: TLS handshake error from 10.129.0.1:52158: EOF
level=info timestamp=2018-08-07T19:56:27.909384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/07 19:56:36 http: TLS handshake error from 10.129.0.1:52164: EOF
2018/08/07 19:56:46 http: TLS handshake error from 10.129.0.1:52170: EOF
2018/08/07 19:56:56 http: TLS handshake error from 10.129.0.1:52176: EOF
level=info timestamp=2018-08-07T19:56:57.942462Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-2x6zh
Pod phase: Running
level=info timestamp=2018-08-07T19:56:22.022297Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:56:26 http: TLS handshake error from 10.128.0.1:58808: EOF
level=info timestamp=2018-08-07T19:56:26.767689Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:56:36 http: TLS handshake error from 10.128.0.1:58854: EOF
level=info timestamp=2018-08-07T19:56:36.983600Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:56:40.178906Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:56:46 http: TLS handshake error from 10.128.0.1:58898: EOF
level=info timestamp=2018-08-07T19:56:47.250654Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:56:52.411309Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:56:52.425867Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:56:56.112069Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:56:56.156636Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-07T19:56:56.181240Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/07 19:56:56 http: TLS handshake error from 10.128.0.1:58942: EOF
level=info timestamp=2018-08-07T19:56:57.497917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-8c5lj
Pod phase: Running
level=info timestamp=2018-08-07T19:53:23.922539Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-kch7q
Pod phase: Running
level=info timestamp=2018-08-07T19:53:38.154116Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-08-07T19:53:38.154288Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer"
level=info timestamp=2018-08-07T19:53:38.154417Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-08-07T19:53:38.154554Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer"
level=info timestamp=2018-08-07T19:53:38.162077Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-08-07T19:53:38.163634Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-08-07T19:53:38.163982Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-08-07T19:53:38.164172Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-08-07T19:53:38.164417Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-08-07T19:53:39.832675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind= uid=94827e7b-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:53:39.833275Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind= uid=94827e7b-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:53:40.128318Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd"
level=info timestamp=2018-08-07T19:54:01.352836Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwhwcjn5kkxrt48dhczbr7snh65ll kind= uid=a153f6b9-9a7b-11e8-b575-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-07T19:54:01.365937Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwhwcjn5kkxrt48dhczbr7snh65ll kind= uid=a153f6b9-9a7b-11e8-b575-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-07T19:54:01.592697Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwhwcjn5kkxrt48dhczbr7snh65ll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwhwcjn5kkxrt48dhczbr7snh65ll"

Pod name: virt-handler-x58fz
Pod phase: Running
level=info timestamp=2018-08-07T19:17:14.718660Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:17:14.719165Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.719265Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.719332Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.719547Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.720965Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:14.721138Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: true\n"
level=info timestamp=2018-08-07T19:17:14.721192Z pos=vm.go:322 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-07T19:17:14.721257Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:14.721378Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:14.721602Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind= uid=68e15058-9a76-11e8-b575-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:17:27.928104Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq, existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928417Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:17:27.928639Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:17:27.930186Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-alternative name=testvmiwck244crsv2zgsj9fl67hzv72vjlh4t5ngwpkmvgj7t9rg6q5zlvvrjwswll5rq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-xnssb
Pod phase: Running
level=info timestamp=2018-08-07T19:54:02.177798Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd, existing: false\n"
level=info timestamp=2018-08-07T19:54:02.177952Z pos=vm.go:336 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-07T19:54:02.178020Z pos=vm.go:338 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-07T19:54:02.178243Z pos=vm.go:365 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-07T19:54:02.178356Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-07T19:54:02.178810Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:54:51.260954Z pos=vm.go:749 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-07T19:54:51.262535Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd, existing: false\n"
level=info timestamp=2018-08-07T19:54:51.262712Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:54:51.262969Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:54:51.263639Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-07T19:54:51.265098Z pos=vm.go:320 component=virt-handler msg="Processing vmi testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd, existing: false\n"
level=info timestamp=2018-08-07T19:54:51.265495Z pos=vm.go:336 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-07T19:54:51.265773Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-07T19:54:51.266085Z pos=vm.go:447 component=virt-handler namespace=kubevirt-test-default name=testvmitbmxntqlbsdws8p2pgt2hqjw9wlqjtc2jp2zmtv6jxxwvd9w92ntkvzbhgzvlqd kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwh8gq9x
Pod phase: Pending
level=info timestamp=2018-08-07T19:54:06.004165Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-07T19:54:06.004374Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-07T19:54:06.006548Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-07T19:54:17.274902Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-07T19:54:17.374318Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwhwcjn5kkxrt48dhczbr7snh65ll"
level=info timestamp=2018-08-07T19:54:17.376180Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-07T19:54:17.376650Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [181.091 seconds]
Health Monitoring
/root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37
  A VirtualMachineInstance with a watchdog device
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56
    should be shut down when the watchdog expires [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57

    Timed out after 90.012s.
    Timed out waiting for VMI to enter Running phase
    Expected
        : false
    to equal
        : true

/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101
------------------------------
STEP: Starting a VirtualMachineInstance
level=info timestamp=2018-08-07T19:54:01.882458Z pos=utils.go:246 component=tests namespace=kubevirt-test-default name=testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwhwcjn5kkxrt48dhczbr7snh65ll kind=VirtualMachineInstance uid=a153f6b9-9a7b-11e8-b575-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiwklnfbbhmjhqv2td7zj99b64v22kwk29snwwh8gq9x"
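Note: this failure and the panic that follows are the same pattern timing out: the test helper polls the VMI until its status phase reaches Running and fails once a 90s deadline elapses, while the virt-launcher pod above never left Pending. A minimal, self-contained sketch of that wait-for-phase pattern, assuming a hypothetical getPhase callback (this is not the actual tests/utils.go helper):

package main

import (
	"errors"
	"fmt"
	"time"
)

// waitForPhase polls getPhase until it reports the wanted phase or the
// timeout elapses. getPhase stands in for a GET against the VMI's status
// via the cluster API.
func waitForPhase(getPhase func() (string, error), want string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		phase, err := getPhase()
		if err != nil {
			return err
		}
		if phase == want {
			return nil
		}
		time.Sleep(time.Second)
	}
	return errors.New("timed out waiting for VMI to enter " + want + " phase")
}

func main() {
	// Simulate a VMI stuck in Pending, like the virt-launcher pod above.
	err := waitForPhase(func() (string, error) { return "Pending", nil }, "Running", 3*time.Second)
	fmt.Println(err)
}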
•panic: test timed out after 1h30m0s

goroutine 14726 [running]:
testing.(*M).startAlarm.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc
created by time.goFunc
    /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44

goroutine 1 [chan receive, 90 minutes]:
testing.(*T).Run(0xc4205705a0, 0x139fc95, 0x9, 0x1432278, 0x4801e6)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301
testing.runTests.func1(0xc4205704b0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64
testing.tRunner(0xc4205704b0, 0xc4208dddf8)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
testing.runTests(0xc4208e94e0, 0x1d34a50, 0x1, 0x1, 0x412009)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4
testing.(*M).Run(0xc420578300, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171
main.main()
    _testmain.go:44 +0x151

goroutine 20 [chan receive]:
kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1d60280)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b
created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203

goroutine 21 [syscall, 90 minutes]:
os/signal.signal_recv(0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6
os/signal.loop()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22
created by os/signal.init.0
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41

goroutine 10 [select]:
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch(0xc420feefe8, 0xc4208a6db0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:286 +0x579
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).WaitFor(0xc420feefe8, 0x139ba31, 0x6, 0x11e4920, 0x14b2710, 0x0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:296 +0xba
kubevirt.io/kubevirt/tests.waitForVMIStart(0x14bbd60, 0xc421062a00, 0x5a, 0x0, 0x1, 0xc420af7f01)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1064 +0x50e
kubevirt.io/kubevirt/tests.WaitForSuccessfulVMIStart(0x14bbd60, 0xc421062a00, 0x1d7e938, 0xc420af7f30)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1101 +0x43
kubevirt.io/kubevirt/tests_test.glob..func20.1(0xc421062280)
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:63 +0x355
kubevirt.io/kubevirt/tests_test.glob..func20.4.1.1()
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:88 +0xfc
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc42012cc60, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113 +0x9c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc42012cc60, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:64 +0x13e
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*ItNode).Run(0xc42086ac00, 0x14b8200, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go:26 +0x7f
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc420866870, 0x0, 0x14b8200, 0xc4200ff480)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:203 +0x648
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc420866870, 0x14b8200, 0xc4200ff480)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:138 +0xff
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc42095e780, 0xc420866870, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:200 +0x10d
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc42095e780, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:170 +0x329
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc42095e780, 0xb)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:66 +0x11b
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc4200fcaf0, 0x7f7a4caf2f88, 0xc4205705a0, 0x13a2278, 0xb, 0xc4208e9520, 0x2, 0x2, 0x14d4b00, 0xc4200ff480, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x14b9260, 0xc4205705a0, 0x13a2278, 0xb, 0xc4208e9500, 0x2, 0x2, 0x2)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:221 +0x258
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x14b9260, 0xc4205705a0, 0x13a2278, 0xb, 0xc4203c8260, 0x1, 0x1, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:209 +0xab
kubevirt.io/kubevirt/tests_test.TestTests(0xc4205705a0)
    /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa
testing.tRunner(0xc4205705a0, 0x1432278)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
created by testing.(*T).Run
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0

goroutine 11 [chan receive, 90 minutes]:
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc42095e780, 0xc4206e9140)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:223 +0xd1
created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:60 +0x88

goroutine 12 [select, 90 minutes, locked to thread]:
runtime.gopark(0x1434450, 0x0, 0x139c7b7, 0x6, 0x18, 0x1)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a
runtime.selectgo(0xc420486750, 0xc4206e9200)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50
runtime.ensureSigM.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4
runtime.goexit()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1

goroutine 66 [IO wait]:
internal/poll.runtime_pollWait(0x7f7a4cb7bf00, 0x72, 0xc420e77850)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57
internal/poll.(*pollDesc).wait(0xc4208ef818, 0x72, 0xffffffffffffff00, 0x14ba420, 0x1c4b7d0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b
internal/poll.(*pollDesc).waitRead(0xc4208ef818, 0xc4206f4000, 0x8000, 0x8000)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d
internal/poll.(*FD).Read(0xc4208ef800, 0xc4206f4000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d
net.(*netFD).Read(0xc4208ef800, 0xc4206f4000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f
net.(*conn).Read(0xc420722318, 0xc4206f4000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a
crypto/tls.(*block).readFromUntil(0xc4208a6b70, 0x7f7a4caf3208, 0xc420722318, 0x5, 0xc420722318, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96
crypto/tls.(*Conn).readRecord(0xc42090e380, 0x1434517, 0xc42090e4a0, 0x20)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0
crypto/tls.(*Conn).Read(0xc42090e380, 0xc42068e000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100
bufio.(*Reader).Read(0xc4208e6c60, 0xc42082c498, 0x9, 0x9, 0xc42111f828, 0x1, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238
io.ReadAtLeast(0x14b7000, 0xc4208e6c60, 0xc42082c498, 0x9, 0x9, 0x9, 0xc420b5aea0, 0x43f2c1, 0xc420683680)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86
io.ReadFull(0x14b7000, 0xc4208e6c60, 0xc42082c498, 0x9, 0x9, 0x1434598, 0xc420e77d10, 0x462d33)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc42082c498, 0x9, 0x9, 0x14b7000, 0xc4208e6c60, 0x0, 0xc400000000, 0x7efc20, 0xc420ec0528)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc42082c460, 0xc4203ff140, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc420e77fb0, 0x14331d0, 0xc420484fb0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc42010bd40)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76
created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651

goroutine 14720 [chan receive]:
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch.func3(0x14c1260, 0xc4203ff110, 0xc4202ae000, 0xc420049c80)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:277 +0x93
created by kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:275 +0x4ae

goroutine 14719 [semacquire]:
sync.runtime_notifyListWait(0xc420ec0540, 0xc400000001)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sema.go:510 +0x10b
sync.(*Cond).Wait(0xc420ec0530)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/sync/cond.go:56 +0x80
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*pipe).Read(0xc420ec0528, 0xc420d61801, 0x5ff, 0x5ff, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/pipe.go:64 +0x8f
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.transportResponseBody.Read(0xc420ec0500, 0xc420d61801, 0x5ff, 0x5ff, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1674 +0xa1
encoding/json.(*Decoder).refill(0xc4203d81e0, 0x835c0a, 0x9)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:159 +0x132
encoding/json.(*Decoder).readValue(0xc4203d81e0, 0x0, 0x0, 0x11f6880)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:134 +0x23d
encoding/json.(*Decoder).Decode(0xc4203d81e0, 0x12127c0, 0xc4207cd8e0, 0x14bf020, 0xc420f66a00)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:63 +0x78
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer.(*jsonFrameReader).Read(0xc4203ff0e0, 0xc42023ea80, 0x800, 0xa80, 0xc42077c340, 0x40, 0x38)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer/framer.go:150 +0x295
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming.(*decoder).Decode(0xc420c1bf40, 0x0, 0x14c0ce0, 0xc42077c340, 0x480, 0x14bf020, 0xc420049c78, 0x456ae0, 0xc420049c20)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming/streaming.go:77 +0x95
kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch.(*Decoder).Decode(0xc4207cd4e0, 0xc420e73fa8, 0x5, 0x14bf020, 0xc420f66a00, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch/decoder.go:49 +0x7c
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4203ff110)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:93 +0x12e
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

make: *** [functest] Error 2
+ make cluster-down
./cluster/down.sh
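Note: the "panic: test timed out after 1h30m0s" is Go's testing package killing the binary when the overall suite deadline (here 90 minutes, typically set via go test -timeout) elapses; the dump then shows goroutine 10 parked in a select inside tests.(*ObjectEventWatcher).Watch (utils.go:286) with no event to receive. The defensive shape for such a watch loop is a select that also honors a timeout channel. A minimal, self-contained sketch of that pattern, assuming hypothetical ch and processed stand-ins for a watch result channel and the test's event predicate (this is not the actual utils.go code):

package main

import (
	"fmt"
	"time"
)

// watchUntil drains events from ch until processed returns true or the
// timeout fires. The timeout arm is what keeps this loop from blocking
// forever the way goroutine 10 did above.
func watchUntil(ch <-chan string, processed func(string) bool, timeout time.Duration) error {
	expired := time.After(timeout)
	for {
		select {
		case ev, ok := <-ch:
			if !ok {
				return fmt.Errorf("watch channel closed")
			}
			if processed(ev) {
				return nil
			}
		case <-expired:
			return fmt.Errorf("timed out after %s waiting for event", timeout)
		}
	}
}

func main() {
	ch := make(chan string) // nothing is ever sent, like a VMI that never starts
	err := watchUntil(ch, func(ev string) bool { return ev == "Started" }, 2*time.Second)
	fmt.Println(err)
}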