+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading ......................................................................
Downloading .......
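The preamble above reduces to a small pattern: derive the cluster provider from the job name, then arm an EXIT trap so `make cluster-down` runs no matter how the job ends. A minimal sketch, assuming a surrounding harness script (the crio provider name is an assumption; the variables and commands come from the trace):

  #!/bin/bash
  set -euo pipefail
  JOB=openshift-3.10-release
  if [[ $JOB =~ openshift-.* ]]; then
      if [[ $JOB =~ .*-crio-.* ]]; then
          export KUBEVIRT_PROVIDER=os-3.10.0-crio   # assumption: crio variant name
      else
          export KUBEVIRT_PROVIDER=os-3.10.0
      fi
  fi
  export KUBEVIRT_NUM_NODES=2
  # EXIT already fires on INT/TERM-driven termination; SIGSTOP (listed in the
  # logged trap) can never be caught, so it is omitted here.
  trap '{ make cluster-down; }' EXIT SIGINT SIGTERM
  make cluster-down   # clear any stale cluster from a previous run
  make cluster-up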
2018/07/26 08:52:28 Waiting for host: 192.168.66.102:22
2018/07/26 08:52:31 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:52:39 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:52:47 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:52:55 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:53:03 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:53:08 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987
e183c40c07dc
861f604efed4
12902ad26342
028539b1f68b
bd6f07c1906c
d1f95a33a226
c43f96b6da26
e007e5cfd226
b42e2bceca6e
00531aec6f9a
e4ad39ba6cef
504c3df6bbf4
eb1ec0b445ce
b8955b91e8e5
f739ed8f3e59
07668d85ab3a
a6045d125d7b
2ce17110e009
b45f64ab28ef
3a15945be9e1
2a0af99ae1d1
0ece927846d7
0202d5f5dfae
8ce743769d8f
2efb36567bd8
96b65c0493c5
e9ce89fa30e3
2018/07/26 08:53:13 Waiting for host: 192.168.66.101:22
2018/07/26 08:53:16 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:53:24 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:53:32 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 08:53:37 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=0.708 ms

--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 0.708/0.708/0.708/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
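The "Problem with dial ... Sleeping 5s" lines are the provisioner polling TCP port 22 until the VM answers. A rough bash equivalent of that loop (the real messages come from the cluster CLI; this sketch only mirrors its behaviour, using bash's /dev/tcp redirection):

  # Poll a host:port every 5s until a TCP connection succeeds.
  wait_for_ssh() {
      local host=$1 port=${2:-22}
      until timeout 5 bash -c "exec 3<>/dev/tcp/${host}/${port}" 2>/dev/null; do
          echo "$(date '+%Y/%m/%d %H:%M:%S') Problem with dial: ${host}:${port}. Sleeping 5s"
          sleep 5
      done
      echo "Connected to tcp://${host}:${port}"
  }
  wait_for_ssh 192.168.66.102 22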
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable

--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms

+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

PLAY [Ensure there are new_nodes] **********************************************

TASK [fail] ********************************************************************
skipping: [localhost]
TASK [fail] ********************************************************************
skipping: [localhost]
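Unrolled in the trace above is a simple discovery loop: probe candidate addresses in order and stop at the first silent one. A hedged reconstruction (the destinations of the echoes are assumptions, since xtrace hides redirections; the commands themselves come from the trace):

  echo '[new_nodes]' >> /root/inventory
  sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
  nodes_found=false
  for i in $(seq 2 100); do
      node=$(printf 'node%02d' "$i")
      node_ip=$(printf '192.168.66.1%02d' "$i")
      if ! ping "$node_ip" -c 1; then
          break                                   # first unreachable IP ends the scan
      fi
      nodes_found=true
      echo "$node_ip $node"                       # assumption: appended to /etc/hosts
      echo "Found $node. Adding it to the inventory."
      echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> /root/inventory
  done
  [ "$nodes_found" = true ] && ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml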
PLAY [Initialization Checkpoint Start] *****************************************

TASK [Set install initialization 'In Progress'] ********************************
ok: [node01]

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)
[WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config

PLAY [Ensure that all non-node hosts are accessible] ***************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

PLAY [Initialize basic host facts] *********************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02
TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] ***
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : debug] ************************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_stats] ********************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] ***
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Standardize on latest variable names] *****
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Normalize openshift_release] **************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02
TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
skipping: [node01] => (item=openshift_hosted_etcd_storage_kind)
skipping: [node02] => (item=openshift_hosted_etcd_storage_kind)
TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : At least one master is schedulable] *******
skipping: [node01]
skipping: [node02]
TASK [Detecting Operating System from ostree_booted] ***************************
ok: [node02]
ok: [node01]
TASK [set openshift_deployment_type if unset] **********************************
skipping: [node01]
skipping: [node02]
TASK [check for node already bootstrapped] *************************************
ok: [node02]
ok: [node01]
TASK [initialize_facts set fact openshift_is_bootstrapped] *********************
ok: [node01]
ok: [node02]
TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] ***
ok: [node01]
ok: [node02]
TASK [Determine Atomic Host Docker Version] ************************************
skipping: [node01]
skipping: [node02]
TASK [assert atomic host docker version is 1.12 or later] **********************
skipping: [node01]
skipping: [node02]

PLAY [Retrieve existing master configs and validate] ***************************

TASK [openshift_control_plane : stat] ******************************************
ok: [node01]
TASK [openshift_control_plane : slurp] *****************************************
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] ***
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Initialize special first-master variables] *******************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Disable web console if required] *****************************************

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Setup yum repositories for all hosts] ************************************

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Install Red Hat Subscription manager] *******************
skipping: [node02]
TASK [rhel_subscribe : Is host already registered?] ****************************
skipping: [node02]
TASK [rhel_subscribe : Register host] ******************************************
skipping: [node02]
TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] ***********
skipping: [node02]
TASK [rhel_subscribe : Attach to OpenShift Pool] *******************************
skipping: [node02]
TASK [rhel_subscribe : Satellite preparation] **********************************
skipping: [node02]
TASK [openshift_repos : openshift_repos detect ostree] *************************
ok: [node02]
TASK [openshift_repos : Ensure libselinux-python is installed] *****************
ok: [node02]
TASK [openshift_repos : Remove openshift_additional.repo file] *****************
ok: [node02]
TASK [openshift_repos : Create any additional repos that are defined] **********
TASK [openshift_repos : include_tasks] *****************************************
skipping: [node02]
TASK [openshift_repos : include_tasks] *****************************************
included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02
TASK [openshift_repos : Configure origin gpg keys] *****************************
ok: [node02]
TASK [openshift_repos : Configure correct origin release repository] ***********
ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2)
TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] ***
changed: [node02] => {
    "msg": "First run of openshift_repos"
}
TASK [openshift_repos : Record that openshift_repos already ran] ***************
ok: [node02]
RUNNING HANDLER [openshift_repos : refresh cache] ******************************
changed: [node02]

PLAY [Install packages necessary for installer] ********************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Determine if chrony is installed] ****************************************
changed: [node02]
[WARNING]: Consider using the yum, dnf or zypper module rather than running rpm.
If you need to use command because yum, dnf or zypper is insufficient you can
add warn=False to this command task or set command_warnings=False in
ansible.cfg to get rid of this message.
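The warning above fires because the play shells out to rpm instead of using a package module; the check itself is only a presence probe. A hedged equivalent of what "Determine if chrony is installed" decides (exact flags are an assumption):

  # rc=0 means chrony is present, so the "Install ntp package" task is skipped.
  if rpm -q chrony >/dev/null 2>&1; then
      echo "chrony already installed; ntp not needed"
  else
      yum install -y ntp
  fi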
TASK [Install ntp package] *****************************************************
skipping: [node02]
TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]
TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=python-docker)

PLAY [Initialize cluster facts] ************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]
TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node01]
ok: [node02]
TASK [Gather Cluster facts] ****************************************************
changed: [node02]
changed: [node01]
TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]
TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]

PLAY [Initialize etcd host variables] ******************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0"
}
TASK [openshift_version : set_fact] ********************************************
ok: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Verify Requirements] *****************************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [Run variable sanity checks] **********************************************
ok: [node01]
TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]

PLAY [Initialization Checkpoint End] *******************************************

TASK [Set install initialization 'Complete'] ***********************************
ok: [node01]

PLAY [Validate node hostnames] *************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Query DNS for IP address of node02] **************************************
ok: [node02]
TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]
TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]

PLAY [Configure os_firewall] ***************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]
TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]
TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]
TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]
TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)
TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]
TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]
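The version facts printed above drive package selection later in the run: openshift_pkg_version "-3.10.0*" is appended to package names as a yum version glob. A minimal illustration (the package names are taken from the node install task further down this log):

  OPENSHIFT_PKG_VERSION='-3.10.0*'
  yum install -y "origin-node${OPENSHIFT_PKG_VERSION}" "origin-clients${OPENSHIFT_PKG_VERSION}"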
TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] ***
skipping: [node02]
TASK [os_firewall : Restart polkitd] *******************************************
skipping: [node02]
TASK [os_firewall : Wait for polkit action to have been created] ***************
skipping: [node02]
TASK [os_firewall : Ensure firewalld service is not enabled] *******************
ok: [node02]
TASK [os_firewall : Wait 10 seconds after disabling firewalld] *****************
skipping: [node02]
TASK [os_firewall : Install iptables packages] *********************************
ok: [node02] => (item=iptables)
ok: [node02] => (item=iptables-services)
TASK [os_firewall : Start and enable iptables service] *************************
ok: [node02 -> node02] => (item=node02)
TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] ***
skipping: [node02]

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [container_runtime : Setup the docker-storage for overlay] ****************
skipping: [node02]
TASK [container_runtime : Create file system on extra volume device] ***********
TASK [container_runtime : Create mount entry for extra volume] *****************

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
ok: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ******************
skipping: [node02]
TASK [container_runtime : Add https_proxy to /etc/atomic.conf] *****************
skipping: [node02]
TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ********************
skipping: [node02]
TASK [container_runtime : Get current installed Docker version] ****************
ok: [node02]
TASK [container_runtime : Error out if Docker pre-installed but too old] *******
skipping: [node02]
TASK [container_runtime : Error out if requested Docker is too old] ************
skipping: [node02]
TASK [container_runtime : Install Docker] **************************************
skipping: [node02]
TASK [container_runtime : Ensure docker.service.d directory exists] ************
ok: [node02]
TASK [container_runtime : Configure Docker service unit file] ******************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Set registry params] *********************************
skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []})
TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] ***
skipping: [node02]
TASK [container_runtime : Set Proxy Settings] **********************************
skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''})
TASK [container_runtime : Set various Docker options] **************************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Configure Docker Network OPTIONS] ********************
ok: [node02]
TASK [container_runtime : Detect if docker is already started] *****************
ok: [node02]
TASK [container_runtime : Start the Docker service] ****************************
ok: [node02]
TASK [container_runtime : set_fact] ********************************************
ok: [node02]
TASK [container_runtime : Check for docker_storage_path/overlay2] **************
ok: [node02]
TASK [container_runtime : Fixup SELinux permissions for docker] ****************
changed: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
ok: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
ok: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
ok: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]
TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Check that overlay is in the kernel] *****************
skipping: [node02]
TASK [container_runtime : Add overlay to modprobe.d] ***************************
skipping: [node02]
TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
skipping: [node02]
TASK [container_runtime : Enable and start systemd-modules-load] ***************
skipping: [node02]
TASK [container_runtime : Install cri-o] ***************************************
skipping: [node02]
TASK [container_runtime : Remove CRI-O default configuration files] ************
skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf)
skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf)
TASK [container_runtime : Create the CRI-O configuration] **********************
skipping: [node02]
TASK [container_runtime : Ensure CNI configuration directory exists] ***********
skipping: [node02]
TASK [container_runtime : Add iptables allow rules] ****************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove iptables rules] *******************************
TASK [container_runtime : Add firewalld allow rules] ***************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove firewalld allow rules] ************************
TASK [container_runtime : Configure the CNI network] ***************************
skipping: [node02]
TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
skipping: [node02]
TASK [container_runtime : Start the CRI-O service] *****************************
skipping: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
skipping: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
skipping: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
skipping: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
skipping: [node01]
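Every CRI-O task above is skipped because this job runs the Docker runtime (the job name never matched the crio regex at the top of the log). The Docker path that did run boils down to a few host-level checks; hedged shell equivalents (exact commands are assumptions, the paths come from the task names):

  docker info --format '{{.Driver}}'   # "Check for docker_storage_path/overlay2": expect overlay2
  mkdir -p /var/lib/containers         # "Ensure /var/lib/containers exists"
  restorecon -R /var/lib/containers    # "Fix SELinux Permissions on /var/lib/containers"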
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Node Preparation Checkpoint Start] ***************************************

TASK [Set Node preparation 'In Progress'] **************************************
ok: [node01]

PLAY [Only target nodes that have not yet been bootstrapped] *******************

TASK [Gathering Facts] *********************************************************
ok: [localhost]
TASK [add_host] ****************************************************************
skipping: [localhost] => (item=node02)
ok: [localhost] => (item=node01)

PLAY [Disable excluders] *******************************************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]
TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_docker_excluder": true
}
TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}
TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]
TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]
TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-docker-excluder version detected] ************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-excluder version detected] *******************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
skipping: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]

PLAY [Configure nodes] *********************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_cloud_provider : Set cloud provider facts] *********************
skipping: [node02]
TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
skipping: [node02]
TASK [openshift_cloud_provider : include the defined cloud provider files] *****
skipping: [node02]
TASK [openshift_node : fail] ***************************************************
skipping: [node02]
TASK [openshift_node : Check for NetworkManager service] ***********************
ok: [node02]
TASK [openshift_node : Set fact using_network_manager] *************************
ok: [node02]
TASK [openshift_node : Install dnsmasq] ****************************************
ok: [node02]
TASK [openshift_node : ensure origin/node directory exists] ********************
changed: [node02] => (item=/etc/origin)
changed: [node02] => (item=/etc/origin/node)
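The disable/enable excluder dance above brackets the package work: the excluders pin docker and origin packages in yum, are lifted for the install window, then re-armed. A hedged sketch of the underlying mechanism (the script name and verbs are assumptions based on how the origin excluder packages are commonly packaged; treat them as illustrative only):

  /sbin/origin-docker-excluder unexclude   # "disable docker excluder": lift the yum exclude
  yum install -y "origin-node-3.10.0*"     # package work happens while excludes are off
  /sbin/origin-docker-excluder exclude     # "Enable docker excluder": re-arm the pin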
TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
skipping: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
skipping: [node02]
TASK [openshift_node : Install dnsmasq configuration] **************************
ok: [node02]
TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
skipping: [node02]
TASK [openshift_node : Enable dnsmasq] *****************************************
ok: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
ok: [node02]
TASK [openshift_node : Add iptables allow rules] *******************************
ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove iptables rules] **********************************
TASK [openshift_node : Add firewalld allow rules] ******************************
skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove firewalld allow rules] ***************************
TASK [openshift_node : Checking for journald.conf] *****************************
ok: [node02]
TASK [openshift_node : Create journald persistence directories] ****************
ok: [node02]
TASK [openshift_node : Update journald setup] **********************************
ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'})
ok: [node02] => (item={u'var': u'Compress', u'val': True})
ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000})
ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
ok: [node02] => (item={u'var': u'ForwardToSyslog', u'val': False})
ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False})
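The journald items above map one-to-one onto journald settings; rendered as a config file, the result is equivalent to the following sketch (the role edits /etc/systemd/journald.conf in place; the drop-in path used here is an assumption):

  cat <<'EOF' > /etc/systemd/journald.conf.d/99-origin.conf   # assumption: drop-in instead of in-place edit
  [Journal]
  Storage=persistent
  Compress=yes
  SyncIntervalSec=1s
  RateLimitInterval=1s
  RateLimitBurst=10000
  SystemMaxUse=8G
  SystemKeepFree=20%
  SystemMaxFileSize=10M
  MaxRetentionSec=1month
  MaxFileSec=1day
  ForwardToSyslog=no
  ForwardToWall=no
  EOF
  systemctl restart systemd-journald   # the "Restart journald" handler below is skipped because nothing changed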
TASK [openshift_node : Restart journald] ***************************************
skipping: [node02]
TASK [openshift_node : Disable swap] *******************************************
ok: [node02]
TASK [openshift_node : Install node, clients, and conntrack packages] **********
ok: [node02] => (item={u'name': u'origin-node-3.10.0*'})
ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'})
ok: [node02] => (item={u'name': u'conntrack-tools'})
TASK [openshift_node : Restart cri-o] ******************************************
skipping: [node02]
TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
changed: [node02]
TASK [openshift_node : sysctl] *************************************************
ok: [node02]
TASK [openshift_node : Check for credentials file for registry auth] ***********
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth] *******************
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth (alternative)] *****
skipping: [node02]
TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
skipping: [node02]
TASK [openshift_node : Check that node image is present] ***********************
changed: [node02]
TASK [openshift_node : Pre-pull node image] ************************************
skipping: [node02]
TASK [openshift_node : Copy node script to the node] ***************************
ok: [node02]
TASK [openshift_node : Install Node service file] ******************************
ok: [node02]
TASK [openshift_node : Ensure old system path is set] **************************
skipping: [node02] => (item=/etc/origin/openvswitch)
skipping: [node02] => (item=/var/lib/kubelet)
skipping: [node02] => (item=/opt/cni/bin)
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Copy node container image to ostree storage] ************
skipping: [node02]
TASK [openshift_node : Install or Update node system container] ****************
skipping: [node02]
TASK [openshift_node : Restart network manager to ensure networking configuration is in place] ***
skipping: [node02]
TASK [openshift_node : Configure Node settings] ********************************
ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='})
ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'})
ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'})
TASK [openshift_node : Configure Proxy Settings] *******************************
skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='})
skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='})
skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'})
TASK [openshift_node : file] ***************************************************
skipping: [node02]
TASK [openshift_node : Create the Node config] *********************************
changed: [node02]
TASK [openshift_node : Configure Node Environment Variables] *******************
TASK [openshift_node : Ensure the node static pod directory exists] ************
changed: [node02]
TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]
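The "Configure Node settings" items above are line edits keyed on the regexes shown; the net effect is that the node's sysconfig ends up containing these lines (the target file /etc/sysconfig/origin-node is an assumption; the values come from the log):

  cat <<'EOF' >> /etc/sysconfig/origin-node   # assumption: the role replaces matching lines rather than appending
  OPTIONS=
  DEBUG_LOGLEVEL=2
  IMAGE_VERSION=v3.10.0-rc.0
  EOF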
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Install NFS storage plugin dependencies] ****************
ok: [node02]
TASK [openshift_node : Check for existence of nfs sebooleans] ******************
ok: [node02] => (item=virt_use_nfs)
ok: [node02] => (item=virt_sandbox_use_nfs)
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:30.348561', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.270932', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:30.077629', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:31.610430', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.017394', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:31.593036', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:30.348561', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.270932', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:30.077629', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:31.610430', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.017394', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:31.593036', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Install GlusterFS storage plugin dependencies] **********
ok: [node02]
TASK [openshift_node : Check for existence of fusefs sebooleans] ***************
ok: [node02] => (item=virt_use_fusefs)
ok: [node02] => (item=virt_sandbox_use_fusefs)
TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:38.454899', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017590', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:38.437309', '_ansible_ignore_errors': None, 'failed': False})
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:39.758462', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.018895', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:39.739567', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:38.454899', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017590', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:38.437309', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-26 09:02:39.758462', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.018895', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:39.739567', '_ansible_ignore_errors': None, 'failed': False})
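Both seboolean plays above reduce to querying and persistently setting SELinux booleans; every boolean is already on, which is why nothing reports changed. A shell equivalent of the probe and the set (setsebool -P is the persistent form the seboolean module applies):

  for b in virt_use_nfs virt_sandbox_use_nfs virt_use_fusefs virt_sandbox_use_fusefs; do
      getsebool "$b"         # the probe the play ran, e.g. "virt_use_nfs --> on"
      setsebool -P "$b" on   # a no-op here, since each boolean is already on
  done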
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-26 09:02:39.739567', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use the 
recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node02] changed: [node01] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the 
bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:04:46) Node Preparation : Complete (0:04:16) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5649 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 18d v1.10.0+b81c8f8 node02 Ready compute 55s v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 18d v1.10.0+b81c8f8 node02 Ready compute 56s v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
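The node-readiness gate in the trace above amounts to a small polling loop: list nodes without headers, grep for NotReady, and retry until the grep comes up empty. A minimal bash sketch reconstructed from that trace; the wait_for_nodes helper name is illustrative (not part of this repo), while cluster/kubectl.sh is the provider wrapper the job already uses:

# Illustrative helper (name is hypothetical): block until no node reports NotReady
wait_for_nodes() {
    while [ -n "$(cluster/kubectl.sh get nodes --no-headers | grep NotReady)" ]; do
        echo 'Waiting for all nodes to become Ready ...'
        sleep 5
    done
    echo 'Nodes are ready:'
    cluster/kubectl.sh get nodes
}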
sha256:559a45ac63f40982ccce3a1b80cb62788566f2032c847ad9c45ee993eb9c48d4 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:559a45ac63f40982ccce3a1b80cb62788566f2032c847ad9c45ee993eb9c48d4 go version go1.10 linux/amd64 Waiting for rsyncd to be ready go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.37 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d3c656a2b485 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> a776f834c795 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> 714b6ef15e78 Step 5/8 : USER 1001 ---> Using cache ---> cadd485aa8f4 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> c99a99053215 Removing intermediate container 6b0216746abb Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in d8c6eba842fd ---> f4c0e155cbe7 Removing intermediate container d8c6eba842fd Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-controller" '' ---> Running in 1b392be02e45 ---> 93f9622344ca Removing intermediate container 1b392be02e45 Successfully built 93f9622344ca Sending build context to Docker daemon 43.3 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 795ad92a5172 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 49e8a67155c8 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> 81781b070a6e Removing intermediate container 7b91887e737e Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> 8bdf07cee65d Removing intermediate container 8b4ac3e8c6df Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in c02330b1f836  ---> aa2eb3c7d419 Removing intermediate container c02330b1f836 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in 9988dc2dc46c  ---> 128ff7a1a733 Removing intermediate container 9988dc2dc46c Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> 536b72a9cb1b Removing intermediate container 1f0ff071ef1e Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Running in e89e00c48373 ---> 25ff85ccdb3b Removing intermediate container e89e00c48373 Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-launcher" '' ---> Running in e1acb0bdc808 ---> ad9c6b090ae6 Removing intermediate container e1acb0bdc808 Successfully built ad9c6b090ae6 Sending build context to Docker daemon 41.67 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d3c656a2b485 Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> 33599475d260 Removing intermediate container 91f8316ab052 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Running in a2874d54f7ee ---> cddc9277022d Removing intermediate container a2874d54f7ee Step 5/5 : LABEL 
"kubevirt-functional-tests-openshift-3.10-release0" '' "virt-handler" '' ---> Running in 77ebca225a4f ---> 27f554a1e1d6 Removing intermediate container 77ebca225a4f Successfully built 27f554a1e1d6 Sending build context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d3c656a2b485 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> 9bbbc9ec8ccc Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 6ff95ae380a5 Step 5/8 : USER 1001 ---> Using cache ---> 0026fc44bed8 Step 6/8 : COPY virt-api /usr/bin/virt-api ---> a71cd62dae5e Removing intermediate container b723a0dd3267 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in 60417b675cfd ---> e2cdf7608b6d Removing intermediate container 60417b675cfd Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-api" '' ---> Running in d68e7e523c63 ---> a3137ba4f89c Removing intermediate container d68e7e523c63 Successfully built a3137ba4f89c Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d3c656a2b485 Step 3/7 : ENV container docker ---> Using cache ---> d7ee9dd5410a Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 0b64ac188f84 Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> c9569040fd52 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> b0887fd36d1c Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 072acbfd43ac ---> 7eb9ff0f2a47 Removing intermediate container 072acbfd43ac Successfully built 7eb9ff0f2a47 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d3c656a2b485 Step 3/5 : ENV container docker ---> Using cache ---> d7ee9dd5410a Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> e96d3e3c109a Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "vm-killer" '' ---> Running in d9decf6a1346 ---> ea3ccf7c8a6e Removing intermediate container d9decf6a1346 Successfully built ea3ccf7c8a6e Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> b7f20b0c4c41 Step 3/7 : ENV container docker ---> Using cache ---> 83fc28f38982 Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 604b0b292d97 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 78792d6f56cd Step 6/7 : CMD /entry-point.sh ---> Using cache ---> 7f24cc15e083 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "registry-disk-v1alpha" '' ---> Running in 71517ba6cf70 ---> a1b75bca8b71 Removing intermediate container 71517ba6cf70 Successfully built a1b75bca8b71 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32802/kubevirt/registry-disk-v1alpha:devel ---> a1b75bca8b71 Step 2/4 : MAINTAINER "David Vossel" \ ---> Running in 815368c9ae07 ---> b61534d778aa Removing intermediate container 815368c9ae07 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > 
/disk/cirros.img ---> Running in f17272ec0caa
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed [curl progress meter elided] 100 12.1M 100 12.1M 0 0 8323k 0 0:00:01 0:00:01 --:--:-- 8328k
---> 9d532f861188 Removing intermediate container f17272ec0caa Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in b649d6952f40 ---> 68ee6ae8b029 Removing intermediate container b649d6952f40 Successfully built 68ee6ae8b029 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32802/kubevirt/registry-disk-v1alpha:devel ---> a1b75bca8b71 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Running in e82b0d1483f2 ---> 8d6f129a27c0 Removing intermediate container e82b0d1483f2 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Running in 040c627cca8d
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed [curl progress meter elided; ~221M fetched in roughly 1:56]
100 221M 100 221M 0 0 1941k 0 0:01:56 0:01:56 --:--:-- 1291k
---> 244fb34b77a2 Removing intermediate container 040c627cca8d Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 15bf7753d6a8 ---> 49b4ff50efe4 Removing intermediate container 15bf7753d6a8 Successfully built 49b4ff50efe4 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32802/kubevirt/registry-disk-v1alpha:devel ---> a1b75bca8b71 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 8d6f129a27c0 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Running in 13d2a2047304
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed [curl progress meter elided] 100 37.0M 100 37.0M 0 0 7486k 0 0:00:05 0:00:05 --:--:-- 8874k
---> 91be64ef9ef2 Removing intermediate container 13d2a2047304 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 1547b73ecdb4 ---> 0268999a98b1 Removing intermediate container 1547b73ecdb4 Successfully built 0268999a98b1 Sending build context to Docker daemon 35.59 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d3c656a2b485 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 5704030d2070 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 624a72b3ef33 Step 5/8 : USER 1001 ---> Using cache ---> 74157fb56326 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> 1b855c73cee8 Removing intermediate container b0ecdce9e9d5 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in 48c06fc56db4 ---> 9fde6d146e69 Removing intermediate container 48c06fc56db4 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "subresource-access-test" '' ---> Running in 180def761b97 ---> 9ae7cd44ca13 Removing intermediate container 180def761b97 Successfully built 9ae7cd44ca13 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d3c656a2b485 Step 3/9 : ENV container
docker ---> Using cache ---> d7ee9dd5410a Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> e4ae555b2a96 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 4805ef8280c3 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 7c1f17e56984 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> c388427c6a76 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 5da240e34c8d Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "winrmcli" '' ---> Running in 693695a3e31d ---> 4c4e66b3b0e5 Removing intermediate container 693695a3e31d Successfully built 4c4e66b3b0e5 Sending build context to Docker daemon 36.78 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 58c7014d7bc4 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> 8c091243d279 Removing intermediate container b76b047ae90a Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in 85c9d6dc9b94 ---> a0fba165bcf3 Removing intermediate container 85c9d6dc9b94 Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 02f106916aa9 ---> 427aa490d873 Removing intermediate container 02f106916aa9 Successfully built 427aa490d873 hack/build-docker.sh push The push refers to a repository [localhost:32802/kubevirt/virt-controller] db9f8cf83859: Preparing efce1557ba86: Preparing 891e1e4ef82a: Preparing efce1557ba86: Pushed db9f8cf83859: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:bf4e256e899befbddf6fb76795ce8c7a70f2fdad6e0f15a7de8d4b1873bd1126 size: 949 The push refers to a repository [localhost:32802/kubevirt/virt-launcher] 19036e0ec4da: Preparing 34cbbc75bab2: Preparing b613c48146a0: Preparing 42a0276c44e8: Preparing 30bd402486bb: Preparing 779823b58976: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 5eefb9960a36: Preparing 891e1e4ef82a: Preparing 779823b58976: Waiting da38cf808aa5: Waiting b83399358a92: Waiting 186d8b3e4fd8: Waiting 5eefb9960a36: Waiting fa6154170bf5: Waiting 891e1e4ef82a: Waiting 34cbbc75bab2: Pushed 19036e0ec4da: Pushed 42a0276c44e8: Pushed da38cf808aa5: Pushed b613c48146a0: Pushed b83399358a92: Pushed 186d8b3e4fd8: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller fa6154170bf5: Pushed 30bd402486bb: Pushed 779823b58976: Pushed 5eefb9960a36: Pushed devel: digest: sha256:e58f65327723e28e298c64a770e6722af3a8d5da7841a4c0e32bcceae6d1467c size: 2828 The push refers to a repository [localhost:32802/kubevirt/virt-handler] 85841c6833ab: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher 85841c6833ab: Pushed devel: digest: sha256:9a5f71301e33eac481812318807e5e60d8dd7091852c34f0cac3ceb324092b8c size: 741 The push refers to a repository [localhost:32802/kubevirt/virt-api] a2ffcdacc6db: Preparing 1cd776a5872d: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 1cd776a5872d: Pushed a2ffcdacc6db: Pushed devel: digest: sha256:3b0f4145e8f712efb7118800ded0396fcde8034d4dfab43e70d032198f75abc8 size: 948 The push refers to a repository [localhost:32802/kubevirt/disks-images-provider] 031ac8f2509a: Preparing df0d85013ae0: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 031ac8f2509a: Pushed 
df0d85013ae0: Pushed devel: digest: sha256:098f6d91bff6540c79a1369460e7631608663f6d9e551d54640b6abbc1ec4ca5 size: 948 The push refers to a repository [localhost:32802/kubevirt/vm-killer] c6d1250c13a6: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider c6d1250c13a6: Pushed devel: digest: sha256:2bf61472e17c1d898c9b6bcbce460f1608a6d243e6672057b589a914b6f6fbf8 size: 740 The push refers to a repository [localhost:32802/kubevirt/registry-disk-v1alpha] 3e288742e937: Preparing 7c38bbdf0880: Preparing 25edbec0eaea: Preparing 3e288742e937: Pushed 7c38bbdf0880: Pushed 25edbec0eaea: Pushed devel: digest: sha256:a42c235013abd2de955af6da60b8c240fe40990c7d74f3bffd1074a145751965 size: 948 The push refers to a repository [localhost:32802/kubevirt/cirros-registry-disk-demo] 619a5a8d842f: Preparing 3e288742e937: Preparing 7c38bbdf0880: Preparing 25edbec0eaea: Preparing 7c38bbdf0880: Mounted from kubevirt/registry-disk-v1alpha 3e288742e937: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 619a5a8d842f: Pushed devel: digest: sha256:61c3fcf15a52e6d210bcd20bd436e42677881e665d0b8793b6b166e1fb971b18 size: 1160 The push refers to a repository [localhost:32802/kubevirt/fedora-cloud-registry-disk-demo] ade274325b0c: Preparing 3e288742e937: Preparing 7c38bbdf0880: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 3e288742e937: Mounted from kubevirt/cirros-registry-disk-demo 7c38bbdf0880: Mounted from kubevirt/cirros-registry-disk-demo ade274325b0c: Pushed devel: digest: sha256:bd5bc6f49b597101fc3aa9ad9cef7d91f46f4714d40dd84a0945306f5a3212e7 size: 1161 The push refers to a repository [localhost:32802/kubevirt/alpine-registry-disk-demo] e38aafaa8dac: Preparing 3e288742e937: Preparing 7c38bbdf0880: Preparing 25edbec0eaea: Preparing 7c38bbdf0880: Mounted from kubevirt/fedora-cloud-registry-disk-demo 3e288742e937: Mounted from kubevirt/fedora-cloud-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo e38aafaa8dac: Pushed devel: digest: sha256:521a0434e914231c3d21993d9f7db5923691ebd34213aa49877e5ea194c019c9 size: 1160 The push refers to a repository [localhost:32802/kubevirt/subresource-access-test] d1b0ff598e07: Preparing c3b63a8b92e2: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer c3b63a8b92e2: Pushed d1b0ff598e07: Pushed devel: digest: sha256:63ce65fcfb8ce8a54622323965a6731927150c3e66133bb3f1887e1c10e640e8 size: 948 The push refers to a repository [localhost:32802/kubevirt/winrmcli] 03859482cdc2: Preparing a0f8b95b0bdd: Preparing 2aa87109f2ed: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test 03859482cdc2: Pushed 2aa87109f2ed: Pushed a0f8b95b0bdd: Pushed devel: digest: sha256:1c0d21015ef07d801c0fc365a8618a82a80c45234235dc4ff0c396b7ba73c107 size: 1165 The push refers to a repository [localhost:32802/kubevirt/example-hook-sidecar] 0adff1d9174f: Preparing 39bae602f753: Preparing 0adff1d9174f: Pushed 39bae602f753: Pushed devel: digest: sha256:83ec651d4912326d223ff1eed2c21c842e5da7e8cd3aceff02e96ed52c659bdc size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ 
OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-126-g81b07a4 ++ KUBEVIRT_VERSION=v0.7.0-126-g81b07a4 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:32802/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider 
kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... + cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l 
kubevirt.io No resources found + _kubectl -n default delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + 
KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ 
APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-126-g81b07a4 ++ KUBEVIRT_VERSION=v0.7.0-126-g81b07a4 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:32802/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... 
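The deploy step that follows loops over the generated release manifests, skips anything matching .*demo.*, and feeds the rest to the cluster. A minimal bash sketch of that pattern, assuming MANIFESTS_OUT_DIR and the _kubectl wrapper are defined as in the hack/common.sh and provider config sourced above; it mirrors the trace below rather than adding anything new:

# Apply every release manifest except demo content (mirrors the deploy trace)
for manifest in "${MANIFESTS_OUT_DIR}"/release/*; do
    if [[ ${manifest} =~ .*demo.* ]]; then
        continue  # demo-content.yaml is skipped in release runs
    fi
    _kubectl create -f "${manifest}"
done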
+ [[ -z openshift-3.10-release ]]
+ [[ openshift-3.10-release =~ .*-dev ]]
+ [[ openshift-3.10-release =~ .*-release ]]
+ for manifest in '${MANIFESTS_OUT_DIR}/release/*'
+ [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]]
+ continue
+ for manifest in '${MANIFESTS_OUT_DIR}/release/*'
+ [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]]
+ _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml
clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created
clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created
clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created
serviceaccount "kubevirt-apiserver" created
clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created
clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created
rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created
role.rbac.authorization.k8s.io "kubevirt-apiserver" created
clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created
clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created
serviceaccount "kubevirt-controller" created
serviceaccount "kubevirt-privileged" created
clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created
clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created
clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created
clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created
clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created
service "virt-api" created
deployment.extensions "virt-api" created
deployment.extensions "virt-controller" created
daemonset.extensions "virt-handler" created
customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created
customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created
customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created
customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created
+ _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R
persistentvolumeclaim "disk-alpine" created
persistentvolume "host-path-disk-alpine" created
persistentvolumeclaim "disk-custom" created
persistentvolume "host-path-disk-custom" created
daemonset.extensions "disks-images-provider" created
serviceaccount "kubevirt-testing" created
clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created
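The [[ ... =~ .*demo.* ]] / continue pair in the trace above is a filter: demo content is skipped and every remaining release manifest is applied. A minimal sketch of the loop being traced (reconstructed, so the real cluster/deploy.sh may differ):

    for manifest in "${MANIFESTS_OUT_DIR}"/release/*; do
        [[ $manifest =~ .*demo.* ]] && continue   # demo-content.yaml is not deployed in CI
        _kubectl create -f "$manifest"
    done

Because the provider matches os-*, the next step grants the privileged SCC to KubeVirt's service accounts, which plain Kubernetes providers do not need.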
+ [[ os-3.10.0 =~ os-* ]]
+ _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system
scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"]
+ _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system
scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"]
+ _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system
scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"]
+ _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system
scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"]
+ _kubectl adm policy add-scc-to-user privileged admin
+ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ KUBECONFIG=cluster/os-3.10.0/.kubeconfig
+ cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin
scc "privileged" added to: ["admin"]
+ echo Done
Done
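The five grants above are the same command with a different subject each time; a compact equivalent (hypothetical refactor of the traced commands, not the script's actual code) would be:

    for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
        _kubectl adm policy add-scc-to-user privileged -z "$sa" -n kube-system
    done
    _kubectl adm policy add-scc-to-user privileged admin   # a plain user, hence no -z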
+ namespaces=(kube-system default)
+ [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]]
+ timeout=300
+ sample=30
+ for i in '${namespaces[@]}'
+ current_time=0
++ kubectl get pods -n kube-system --no-headers
++ cluster/kubectl.sh get pods -n kube-system --no-headers
++ grep -v Running
+ '[' -n 'disks-images-provider-9tfn5 0/1 ContainerCreating 0 1s
disks-images-provider-wqlgh 0/1 ContainerCreating 0 1s
virt-api-7d79764579-85l9v 0/1 ContainerCreating 0 11s
virt-api-7d79764579-g7k67 0/1 ContainerCreating 0 11s
virt-controller-7d57d96b65-frkzz 0/1 ContainerCreating 0 11s
virt-controller-7d57d96b65-r4kvx 0/1 ContainerCreating 0 11s
virt-handler-6qv78 0/1 ContainerCreating 0 11s
virt-handler-qsw9k 0/1 ContainerCreating 0 11s' ']'
+ echo 'Waiting for kubevirt pods to enter the Running state ...'
Waiting for kubevirt pods to enter the Running state ...
+ kubectl get pods -n kube-system --no-headers
+ grep -v Running
+ cluster/kubectl.sh get pods -n kube-system --no-headers
disks-images-provider-9tfn5 0/1 ContainerCreating 0 2s
disks-images-provider-wqlgh 0/1 ContainerCreating 0 2s
virt-api-7d79764579-85l9v 0/1 ContainerCreating 0 12s
virt-api-7d79764579-g7k67 0/1 ContainerCreating 0 12s
virt-controller-7d57d96b65-frkzz 0/1 ContainerCreating 0 12s
virt-controller-7d57d96b65-r4kvx 0/1 ContainerCreating 0 12s
virt-handler-6qv78 0/1 ContainerCreating 0 12s
virt-handler-qsw9k 0/1 ContainerCreating 0 12s
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system --no-headers
++ grep -v Running
++ cluster/kubectl.sh get pods -n kube-system --no-headers
+ '[' -n 'disks-images-provider-9tfn5 0/1 ContainerCreating 0 32s
disks-images-provider-wqlgh 0/1 ContainerCreating 0 32s
virt-api-7d79764579-85l9v 0/1 Error 0 42s
virt-handler-6qv78 0/1 ContainerCreating 0 42s' ']'
+ echo 'Waiting for kubevirt pods to enter the Running state ...'
Waiting for kubevirt pods to enter the Running state ...
+ kubectl get pods -n kube-system --no-headers
+ cluster/kubectl.sh get pods -n kube-system --no-headers
+ grep -v Running
disks-images-provider-9tfn5 0/1 ContainerCreating 0 33s
disks-images-provider-wqlgh 0/1 ContainerCreating 0 33s
virt-handler-6qv78 0/1 ContainerCreating 0 43s
+ sleep 30
+ current_time=60
+ '[' 60 -gt 300 ']'
++ kubectl get pods -n kube-system --no-headers
++ grep -v Running
++ cluster/kubectl.sh get pods -n kube-system --no-headers
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n kube-system
+ cluster/kubectl.sh get pods -n kube-system
NAME                               READY     STATUS    RESTARTS   AGE
disks-images-provider-9tfn5        1/1       Running   0          1m
disks-images-provider-wqlgh        1/1       Running   0          1m
master-api-node01                  1/1       Running   1          18d
master-controllers-node01          1/1       Running   2          18d
master-etcd-node01                 1/1       Running   1          18d
virt-api-7d79764579-85l9v          1/1       Running   1          1m
virt-api-7d79764579-g7k67          1/1       Running   0          1m
virt-controller-7d57d96b65-frkzz   1/1       Running   0          1m
virt-controller-7d57d96b65-r4kvx   1/1       Running   0          1m
virt-handler-6qv78                 1/1       Running   0          1m
virt-handler-qsw9k                 1/1       Running   0          1m
+ for i in '${namespaces[@]}'
+ current_time=0
++ kubectl get pods -n default --no-headers
++ cluster/kubectl.sh get pods -n default --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
+ '[' -n '' ']'
+ kubectl get pods -n default
+ cluster/kubectl.sh get pods -n default
NAME                       READY     STATUS    RESTARTS   AGE
docker-registry-1-rl562    1/1       Running   2          18d
registry-console-1-rw9zf   1/1       Running   2          18d
router-1-6cch9             1/1       Running   1          18d
+ kubectl version
+ cluster/kubectl.sh version
oc v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
features: Basic-Auth GSSAPI Kerberos SPNEGO
Server https://127.0.0.1:32799
openshift v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
+ ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ [[ openshift-3.10-release =~ windows.* ]]
+ FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
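The readiness check traced above polls in two passes: first until no pod reports a non-Running status, then until no container reports ready=false. A minimal sketch of that loop, with timeout and sample taken from the trace (the surrounding script is reconstructed and may differ):

    timeout=300
    sample=30
    for ns in kube-system default; do
        # Pass 1: wait until every pod reports Running.
        current_time=0
        while [ -n "$(kubectl get pods -n "$ns" --no-headers | grep -v Running)" ]; do
            echo "Waiting for kubevirt pods to enter the Running state ..."
            kubectl get pods -n "$ns" --no-headers | grep -v Running
            sleep "$sample"
            current_time=$((current_time + sample))
            [ "$current_time" -gt "$timeout" ] && exit 1
        done
        # Pass 2: wait until no container reports ready=false.
        current_time=0
        while [ -n "$(kubectl get pods -n "$ns" '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers | grep false)" ]; do
            sleep "$sample"
            current_time=$((current_time + sample))
            [ "$current_time" -gt "$timeout" ] && exit 1
        done
    done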
+ make functest
hack/dockerized "hack/build-func-tests.sh"
sha256:559a45ac63f40982ccce3a1b80cb62788566f2032c847ad9c45ee993eb9c48d4
go version go1.10 linux/amd64
Waiting for rsyncd to be ready.
go version go1.10 linux/amd64
Compiling tests...
compiled tests.test
hack/functests.sh
Running Suite: Tests Suite
==========================
Random Seed: 1532596916
Will run 148 of 148 specs

S [SKIPPING] in Spec Setup (BeforeEach) [0.028 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to start a vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1352
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.011 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to stop a running vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1352
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.009 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have correct UUID
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1352
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.010 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have pod IP
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1352
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.019 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to start a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1352
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.008 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to stop a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1352
------------------------------
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:20:26 http: TLS handshake error from 10.129.0.1:48476: EOF
level=info timestamp=2018-07-26T09:20:29.992375Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:20:36 http: TLS handshake error from 10.129.0.1:48492: EOF
2018/07/26 09:20:46 http: TLS handshake error from 10.129.0.1:48530: EOF
2018/07/26 09:20:56 http: TLS handshake error from 10.129.0.1:48538: EOF
level=info timestamp=2018-07-26T09:21:05.051159Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:21:06 http: TLS handshake error from 10.129.0.1:48546: EOF
2018/07/26 09:21:17 http: TLS handshake error from 10.129.0.1:48554: EOF
2018/07/26 09:21:27 http: TLS handshake error from 10.129.0.1:48562: EOF
level=info timestamp=2018-07-26T09:21:30.245317Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:21:36 http: TLS handshake error from 10.129.0.1:48582: EOF
2018/07/26 09:21:46 http: TLS handshake error from 10.129.0.1:48590: EOF
2018/07/26 09:21:56 http: TLS handshake error from 10.129.0.1:48598: EOF
2018/07/26 09:22:06 http: TLS handshake error from 10.129.0.1:48606: EOF
2018/07/26 09:22:16 http: TLS handshake error from 10.129.0.1:48614: EOF
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:21:35.991184Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:21:37 http: TLS handshake error from 10.129.0.1:54894: EOF
level=info timestamp=2018-07-26T09:21:47.835125Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:21:47.877011Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:21:47 http: TLS handshake error from 10.129.0.1:54902: EOF
level=info timestamp=2018-07-26T09:21:57.914513Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:21:57 http: TLS handshake error from 10.129.0.1:54910: EOF
level=info timestamp=2018-07-26T09:21:59.958267Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:22:05.216460Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:22:05.217157Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:22:07 http: TLS handshake error from 10.129.0.1:54918: EOF
level=info timestamp=2018-07-26T09:22:07.961168Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:22:17.925489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:22:17 http: TLS handshake error from 10.129.0.1:54926: EOF
level=info timestamp=2018-07-26T09:22:18.000268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:19:26.034193Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-07-26T09:19:26.034264Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-07-26T09:19:26.034287Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-07-26T09:19:26.034303Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-26T09:19:26.034324Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-07-26T09:19:26.034338Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer"
level=info timestamp=2018-07-26T09:19:26.034354Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer"
level=info timestamp=2018-07-26T09:19:26.034432Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-26T09:19:26.039712Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-26T09:19:26.042099Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-26T09:19:26.042147Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T09:19:26.042176Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:20:03.126247Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-26T09:20:03.137685Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-26T09:20:03.139588Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-26T09:20:03.240309Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-26T09:20:03.383976Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-07-26T09:20:03.387200Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T09:22:25.336600Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:25.336921Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:25.337024Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:25.377386Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:25.377710Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:25.378058Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:25.474534Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:25.474820Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:25.474892Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:25.635738Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:25.635923Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:25.636026Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:25.956382Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:25.956768Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:25.956882Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
Pod name: virt-launcher-testvmirm86d-fzwl7
Pod phase: Running
goroutine 14 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202040c0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42193a040, 0xc4200b7778, 0xc42000d8a0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c1e0, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc420088140, 0x114acc0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c1e0, 0xc42000e078, 0x13, 0xc42195e718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc420758140)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420354040, 0xc4200e6320, 0xc421a45a70, 0xc421a45a80, 0xc421956300, 0xc42192cd40, 0x10f5e80, 0xc4200b7778, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
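The "connection is shut down" errors in virt-handler above are downstream of this virt-launcher panic: once the launcher's RPC command server dies in SyncVMI, every subsequent sync attempt against it fails and the VMI is re-enqueued. A quick way to pull such a trace back out of a live cluster (hypothetical helper invocation; pod name and namespace are the ones from this log, and --previous only applies if the container restarted):

    _kubectl logs -n kubevirt-test-default virt-launcher-testvmirm86d-fzwl7 --previous | grep -B 2 -A 15 'goroutine .* \[running\]'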
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:22:36 http: TLS handshake error from 10.129.0.1:48630: EOF
2018/07/26 09:22:46 http: TLS handshake error from 10.129.0.1:48638: EOF
2018/07/26 09:22:56 http: TLS handshake error from 10.129.0.1:48646: EOF
2018/07/26 09:23:06 http: TLS handshake error from 10.129.0.1:48654: EOF
2018/07/26 09:23:16 http: TLS handshake error from 10.129.0.1:48662: EOF
2018/07/26 09:23:26 http: TLS handshake error from 10.129.0.1:48670: EOF
2018/07/26 09:23:36 http: TLS handshake error from 10.129.0.1:48680: EOF
2018/07/26 09:23:46 http: TLS handshake error from 10.129.0.1:48688: EOF
2018/07/26 09:23:56 http: TLS handshake error from 10.129.0.1:48696: EOF
2018/07/26 09:24:06 http: TLS handshake error from 10.129.0.1:48704: EOF
2018/07/26 09:24:16 http: TLS handshake error from 10.129.0.1:48712: EOF
2018/07/26 09:24:26 http: TLS handshake error from 10.129.0.1:48720: EOF
2018/07/26 09:24:36 http: TLS handshake error from 10.129.0.1:48728: EOF
2018/07/26 09:24:46 http: TLS handshake error from 10.129.0.1:48736: EOF
2018/07/26 09:24:56 http: TLS handshake error from 10.129.0.1:48744: EOF
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
2018/07/26 09:24:27 http: TLS handshake error from 10.129.0.1:55032: EOF
level=info timestamp=2018-07-26T09:24:28.627711Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:24:28.891250Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:24:28.912213Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:24:30.033612Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:24:35.610841Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:24:35.615387Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:24:37 http: TLS handshake error from 10.129.0.1:55040: EOF
level=info timestamp=2018-07-26T09:24:38.676977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:24:44.089104Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:24:44.090486Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 09:24:47 http: TLS handshake error from 10.129.0.1:55048: EOF
level=info timestamp=2018-07-26T09:24:48.184332Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:24:48.718182Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:24:57 http: TLS handshake error from 10.129.0.1:55056: EOF
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:19:26.034193Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-07-26T09:19:26.034264Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-07-26T09:19:26.034287Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-07-26T09:19:26.034303Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-26T09:19:26.034324Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-07-26T09:19:26.034338Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer"
level=info timestamp=2018-07-26T09:19:26.034354Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer"
level=info timestamp=2018-07-26T09:19:26.034432Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-26T09:19:26.039712Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-26T09:19:26.042099Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-26T09:19:26.042147Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T09:19:26.042176Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:20:03.126247Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-26T09:20:03.137685Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-26T09:20:03.139588Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-26T09:20:03.240309Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-26T09:20:03.383976Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-07-26T09:20:03.387200Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
Pod name: virt-handler-qsw9k
Pod phase: Running
level=error timestamp=2018-07-26T09:22:26.597561Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:26.597699Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:27.881120Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:27.881345Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmirm86d-fzwl7
Pod phase: Failed
goroutine 14 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202040c0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42193a040, 0xc4200b7778, 0xc42000d8a0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c1e0, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc420088140, 0x114acc0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c1e0, 0xc42000e078, 0x13, 0xc42195e718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc420758140)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420354040, 0xc4200e6320, 0xc421a45a70, 0xc421a45a80, 0xc421956300, 0xc42192cd40, 0x10f5e80, 0xc4200b7778, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure [180.651 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with 3 CPU cores
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56
      should report 3 cpu cores under guest OS [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62

      Unexpected Warning event received.
      Expected
          : Warning
      not to equal
          : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
STEP: Starting a VirtualMachineInstance
level=info timestamp=2018-07-26T09:21:59.187624Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmirm86d-fzwl7"
level=info timestamp=2018-07-26T09:22:25.080355Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmirm86d-fzwl7"
level=error timestamp=2018-07-26T09:22:25.734352Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
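The failure itself is the event watcher at tests/utils.go:245 asserting that no event of type Warning arrives while the VMI starts; the "unexpected EOF" warning emitted when the launcher crashed trips it. To iterate on just this spec instead of all 148, one could narrow the run with a ginkgo focus pattern, assuming (as the trace suggests) that make functest forwards FUNC_TEST_ARGS to the compiled test binary:

    FUNC_TEST_ARGS='--ginkgo.noColor --ginkgo.focus=should report 3 cpu cores' make functest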
username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/26 09:24:47 http: TLS handshake error from 10.129.0.1:55048: EOF level=info timestamp=2018-07-26T09:24:48.184332Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:24:48.718182Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:24:57 http: TLS handshake error from 10.129.0.1:55056: EOF level=info timestamp=2018-07-26T09:24:58.760203Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:25:00.016401Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T09:25:05.684996Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:25:05.688671Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:25:07 http: TLS handshake error from 10.129.0.1:55064: EOF level=info timestamp=2018-07-26T09:25:08.965702Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T09:19:26.034287Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-26T09:19:26.034303Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-26T09:19:26.034324Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-26T09:19:26.034338Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-26T09:19:26.034354Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-26T09:19:26.034432Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-26T09:19:26.039712Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-26T09:19:26.042099Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-26T09:19:26.042147Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-26T09:19:26.042176Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." 
level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T09:25:17.858173Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:25:17.861241Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:25:17.861618Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx" level=info timestamp=2018-07-26T09:25:17.862138Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:25:17.862521Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:25:17.865011Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx" level=info timestamp=2018-07-26T09:25:17.872226Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:25:17.872383Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T09:25:17.872463Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx" level=info timestamp=2018-07-26T09:25:17.912727Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:25:17.912949Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:25:17.913065Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx" level=info timestamp=2018-07-26T09:25:17.993463Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:25:17.993682Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:25:17.993820Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx" Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi5wjvx-xtzwq Pod phase: Running [signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516] goroutine 32 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a65e0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421926080, 0xc42000f370, 0xc420187a40, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc42008c3c0, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc4200ba080, 0x0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc42008c3c0, 0xc42000e078, 0x13, 0xc42005cf18, 0x3, 0x3, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc4202af0c0, 0xc421924230, 0xc421b45000, 0xc421b45010, 0xc421944300, 0xc421916ce0, 0x10f5e80, 0xc42000f370, 0x16, 0x10f5f00, ...) 
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
level=info timestamp=2018-07-26T09:26:00.015167Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:26:06 http: TLS handshake error from 10.129.0.1:48800: EOF
2018/07/26 09:26:16 http: TLS handshake error from 10.129.0.1:48808: EOF
2018/07/26 09:26:26 http: TLS handshake error from 10.129.0.1:48816: EOF
2018/07/26 09:26:36 http: TLS handshake error from 10.129.0.1:48826: EOF
2018/07/26 09:26:46 http: TLS handshake error from 10.129.0.1:48834: EOF
2018/07/26 09:26:56 http: TLS handshake error from 10.129.0.1:48842: EOF
level=info timestamp=2018-07-26T09:27:00.051821Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:27:06 http: TLS handshake error from 10.129.0.1:48850: EOF
2018/07/26 09:27:16 http: TLS handshake error from 10.129.0.1:48858: EOF
2018/07/26 09:27:26 http: TLS handshake error from 10.129.0.1:48866: EOF
level=info timestamp=2018-07-26T09:27:29.965846Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:27:36 http: TLS handshake error from 10.129.0.1:48874: EOF
2018/07/26 09:27:46 http: TLS handshake error from 10.129.0.1:48882: EOF
2018/07/26 09:27:56 http: TLS handshake error from 10.129.0.1:48890: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
2018/07/26 09:27:17 http: TLS handshake error from 10.129.0.1:55170: EOF
level=info timestamp=2018-07-26T09:27:18.454614Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:27:19.587454Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:27:27 http: TLS handshake error from 10.129.0.1:55178: EOF
level=info timestamp=2018-07-26T09:27:29.625711Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:27:36.130495Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:27:36.132437Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:27:37 http: TLS handshake error from 10.129.0.1:55186: EOF
level=info timestamp=2018-07-26T09:27:39.673392Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:27:44.096756Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:27:44.098129Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 09:27:47 http: TLS handshake error from 10.129.0.1:55194: EOF
level=info timestamp=2018-07-26T09:27:48.506515Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:27:49.722284Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:27:57 http: TLS handshake error from 10.129.0.1:55202: EOF

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:19:26.034287Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-07-26T09:19:26.034303Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-26T09:19:26.034324Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-07-26T09:19:26.034338Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer"
level=info timestamp=2018-07-26T09:19:26.034354Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer"
level=info timestamp=2018-07-26T09:19:26.034432Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-26T09:19:26.039712Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-26T09:19:26.042099Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-26T09:19:26.042147Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T09:19:26.042176Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=error timestamp=2018-07-26T09:25:19.116563Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:25:19.116729Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx"
level=info timestamp=2018-07-26T09:25:20.397071Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:25:20.398011Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:25:20.398712Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx"
level=info timestamp=2018-07-26T09:25:22.959549Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:25:22.959995Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:25:22.961105Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx"
level=info timestamp=2018-07-26T09:25:28.082128Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:25:28.083580Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:25:28.085188Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5wjvx"
level=info timestamp=2018-07-26T09:25:38.326689Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:25:38.352270Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:25:38.353044Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:25:38.353794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi5wjvx-xtzwq
Pod phase: Failed
goroutine 32 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a65e0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421926080, 0xc42000f370, 0xc420187a40, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c3c0, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc4200ba080, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c3c0, 0xc42000e078, 0x13, 0xc42005cf18, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202af0c0, 0xc421924230, 0xc421b45000, 0xc421b45010, 0xc421944300, 0xc421916ce0, 0x10f5e80, 0xc42000f370, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure [180.491 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-2Mi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Unexpected Warning event received.
        Expected
          : Warning
        not to equal
          : Warning

        /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
STEP: Starting a VM
level=info timestamp=2018-07-26T09:24:59.251792Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmi5wjvx-xtzwq"
level=info timestamp=2018-07-26T09:25:17.268577Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmi5wjvx-xtzwq"
level=error timestamp=2018-07-26T09:25:17.685055Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"

S [SKIPPING] [0.214 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-1Gi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        No node with hugepages hugepages-1Gi capacity

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:160
------------------------------
•
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:26:06 http: TLS handshake error from 10.129.0.1:48800: EOF
2018/07/26 09:26:16 http: TLS handshake error from 10.129.0.1:48808: EOF
2018/07/26 09:26:26 http: TLS handshake error from 10.129.0.1:48816: EOF
2018/07/26 09:26:36 http: TLS handshake error from 10.129.0.1:48826: EOF
2018/07/26 09:26:46 http: TLS handshake error from 10.129.0.1:48834: EOF
2018/07/26 09:26:56 http: TLS handshake error from 10.129.0.1:48842: EOF
level=info timestamp=2018-07-26T09:27:00.051821Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:27:06 http: TLS handshake error from 10.129.0.1:48850: EOF
2018/07/26 09:27:16 http: TLS handshake error from 10.129.0.1:48858: EOF
2018/07/26 09:27:26 http: TLS handshake error from 10.129.0.1:48866: EOF
level=info timestamp=2018-07-26T09:27:29.965846Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:27:36 http: TLS handshake error from 10.129.0.1:48874: EOF
2018/07/26 09:27:46 http: TLS handshake error from 10.129.0.1:48882: EOF
2018/07/26 09:27:56 http: TLS handshake error from 10.129.0.1:48890: EOF
2018/07/26 09:28:06 http: TLS handshake error from 10.129.0.1:48898: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:27:36.132437Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:27:37 http: TLS handshake error from 10.129.0.1:55186: EOF
level=info timestamp=2018-07-26T09:27:39.673392Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:27:44.096756Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:27:44.098129Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 09:27:47 http: TLS handshake error from 10.129.0.1:55194: EOF
level=info timestamp=2018-07-26T09:27:48.506515Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:27:49.722284Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:27:57 http: TLS handshake error from 10.129.0.1:55202: EOF
level=info timestamp=2018-07-26T09:27:59.771891Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:28:00.024109Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:28:06.216360Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:28:06.222108Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:28:07 http: TLS handshake error from 10.129.0.1:55210: EOF
level=info timestamp=2018-07-26T09:28:09.891902Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:19:26.042147Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T09:19:26.042176Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:27:59.274034Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:27:59.274190Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:27:59.320745Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.338025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.349887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.337718Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqr64l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2fffeb51-90b6-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.511208Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:28:00.511343Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:28:16.390212Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:16.665025Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="unexpected EOF" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:16.701851Z pos=vm.go:251 component=virt-handler reason="unexpected EOF" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:16.704970Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:16.705177Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:16.705456Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:16.707718Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:16.708403Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:16.708661Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:16.730887Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:16.731101Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:16.731198Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:16.771530Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:16.772177Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:16.772520Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiwgrmw-vlcth Pod phase: Running [signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516] goroutine 14 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc420200100, 0xc4201a2c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42059e020, 0xc42065a050, 0xc42044c520, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc420096120, 0xc4200b65f0, 0x13, 0x131ed4d, 0x4, 0xc421952f18, 0x3, 0x3, 0xc4200ba280, 0x0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc420096120, 0xc4200b65f0, 0x13, 0xc420059718, 0x3, 0x3, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc42034e000, 0xc4200b8500, 0xc4208e8c30, 0xc4208e8c40, 0xc421927c80, 0xc42055c060, 0x10f5e80, 0xc42065a050, 0x16, 0x10f5f00, ...) 
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:28:36 http: TLS handshake error from 10.129.0.1:48922: EOF
2018/07/26 09:28:46 http: TLS handshake error from 10.129.0.1:48930: EOF
2018/07/26 09:28:56 http: TLS handshake error from 10.129.0.1:48938: EOF
2018/07/26 09:29:06 http: TLS handshake error from 10.129.0.1:48946: EOF
2018/07/26 09:29:16 http: TLS handshake error from 10.129.0.1:48954: EOF
2018/07/26 09:29:26 http: TLS handshake error from 10.129.0.1:48962: EOF
2018/07/26 09:29:36 http: TLS handshake error from 10.129.0.1:48972: EOF
2018/07/26 09:29:46 http: TLS handshake error from 10.129.0.1:48980: EOF
2018/07/26 09:29:56 http: TLS handshake error from 10.129.0.1:48988: EOF
2018/07/26 09:30:06 http: TLS handshake error from 10.129.0.1:48996: EOF
2018/07/26 09:30:16 http: TLS handshake error from 10.129.0.1:49004: EOF
2018/07/26 09:30:26 http: TLS handshake error from 10.129.0.1:49012: EOF
2018/07/26 09:30:36 http: TLS handshake error from 10.129.0.1:49020: EOF
2018/07/26 09:30:46 http: TLS handshake error from 10.129.0.1:49028: EOF
2018/07/26 09:30:56 http: TLS handshake error from 10.129.0.1:49036: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:30:20.643061Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:30:27 http: TLS handshake error from 10.129.0.1:55324: EOF
level=info timestamp=2018-07-26T09:30:30.056515Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:30:30.696761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:30:36.633087Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:30:36.643175Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:30:37 http: TLS handshake error from 10.129.0.1:55332: EOF
level=info timestamp=2018-07-26T09:30:40.745734Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:30:43.056505Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:30:43.057990Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 09:30:47 http: TLS handshake error from 10.129.0.1:55340: EOF
level=info timestamp=2018-07-26T09:30:48.858452Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:30:50.793753Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:30:57 http: TLS handshake error from 10.129.0.1:55348: EOF
level=info timestamp=2018-07-26T09:31:00.051176Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:19:26.042147Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T09:19:26.042176Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:27:59.274034Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:27:59.274190Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:27:59.320745Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.338025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.349887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.337718Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqr64l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2fffeb51-90b6-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.511208Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:28:00.511343Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=error timestamp=2018-07-26T09:28:17.978128Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:17.978220Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:19.258587Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:19.258830Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:19.258918Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:21.819212Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:21.819618Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:21.819909Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:26.940429Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:28:26.940793Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:28:26.940964Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwgrmw"
level=info timestamp=2018-07-26T09:28:37.181368Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:28:37.208232Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:28:37.209504Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:28:37.209656Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmiwgrmw-vlcth
Pod phase: Failed
goroutine 14 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc420200100, 0xc4201a2c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42059e020, 0xc42065a050, 0xc42044c520, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096120, 0xc4200b65f0, 0x13, 0x131ed4d, 0x4, 0xc421952f18, 0x3, 0x3, 0xc4200ba280, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096120, 0xc4200b65f0, 0x13, 0xc420059718, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42034e000, 0xc4200b8500, 0xc4208e8c30, 0xc4208e8c40, 0xc421927c80, 0xc42055c060, 0x10f5e80, 0xc42065a050, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
------------------------------
• Failure in Spec Setup (BeforeEach) [180.469 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model defined [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:284
      should report defined CPU model
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:285

      Unexpected Warning event received.
      Expected
        : Warning
      not to equal
        : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T09:28:01.087510Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmiwgrmw-vlcth"
level=info timestamp=2018-07-26T09:28:16.229866Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmiwgrmw-vlcth"
level=error timestamp=2018-07-26T09:28:16.531300Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:28:56 http: TLS handshake error from 10.129.0.1:48938: EOF
2018/07/26 09:29:06 http: TLS handshake error from 10.129.0.1:48946: EOF
2018/07/26 09:29:16 http: TLS handshake error from 10.129.0.1:48954: EOF
2018/07/26 09:29:26 http: TLS handshake error from 10.129.0.1:48962: EOF
2018/07/26 09:29:36 http: TLS handshake error from 10.129.0.1:48972: EOF
2018/07/26 09:29:46 http: TLS handshake error from 10.129.0.1:48980: EOF
2018/07/26 09:29:56 http: TLS handshake error from 10.129.0.1:48988: EOF
2018/07/26 09:30:06 http: TLS handshake error from 10.129.0.1:48996: EOF
2018/07/26 09:30:16 http: TLS handshake error from 10.129.0.1:49004: EOF
2018/07/26 09:30:26 http: TLS handshake error from 10.129.0.1:49012: EOF
2018/07/26 09:30:36 http: TLS handshake error from 10.129.0.1:49020: EOF
2018/07/26 09:30:46 http: TLS handshake error from 10.129.0.1:49028: EOF
2018/07/26 09:30:56 http: TLS handshake error from 10.129.0.1:49036: EOF
2018/07/26 09:31:06 http: TLS handshake error from 10.129.0.1:49044: EOF
2018/07/26 09:31:16 http: TLS handshake error from 10.129.0.1:49052: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:30:36.643175Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:30:37 http: TLS handshake error from 10.129.0.1:55332: EOF
level=info timestamp=2018-07-26T09:30:40.745734Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:30:43.056505Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:30:43.057990Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 09:30:47 http: TLS handshake error from 10.129.0.1:55340: EOF
level=info timestamp=2018-07-26T09:30:48.858452Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:30:50.793753Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:30:57 http: TLS handshake error from 10.129.0.1:55348: EOF
level=info timestamp=2018-07-26T09:31:00.051176Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:31:00.877269Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:31:06.946078Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:31:06.947626Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:31:07 http: TLS handshake error from 10.129.0.1:55356: EOF
level=info timestamp=2018-07-26T09:31:11.111896Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d"
level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:27:59.274034Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:27:59.274190Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:27:59.320745Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.338025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.349887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.337718Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqr64l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2fffeb51-90b6-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.511208Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:28:00.511343Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:31:00.978493Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:31:00.978616Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:31:17.271800Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:31:17.605529Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="unexpected EOF" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:31:17.629892Z pos=vm.go:251 component=virt-handler reason="unexpected EOF" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9"
level=info timestamp=2018-07-26T09:31:17.631317Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:31:17.631606Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:31:17.631980Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9"
level=info timestamp=2018-07-26T09:31:17.638809Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:31:17.639264Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:31:17.639457Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9"
level=info timestamp=2018-07-26T09:31:17.659941Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:31:17.660479Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:31:17.661007Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9"
level=info timestamp=2018-07-26T09:31:17.702017Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:31:17.702201Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:31:17.702298Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9" Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmiv2sh9-scvfn
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 11 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200460c0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421932040, 0xc42000e750, 0xc4203fe000, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200e4120, 0xc42000e098, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4202afbc0, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200e4120, 0xc42000e098, 0x13, 0xc42194df18, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202aed40, 0xc4200e64b0, 0xc4204cf770, 0xc4204cf780, 0xc421948300, 0xc421924d40, 0x10f5e80, 0xc42000e750, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:31:46 http: TLS handshake error from 10.129.0.1:49076: EOF
2018/07/26 09:31:56 http: TLS handshake error from 10.129.0.1:49084: EOF
2018/07/26 09:32:06 http: TLS handshake error from 10.129.0.1:49092: EOF
2018/07/26 09:32:16 http: TLS handshake error from 10.129.0.1:49100: EOF
2018/07/26 09:32:26 http: TLS handshake error from 10.129.0.1:49108: EOF
2018/07/26 09:32:36 http: TLS handshake error from 10.129.0.1:49118: EOF
2018/07/26 09:32:46 http: TLS handshake error from 10.129.0.1:49126: EOF
2018/07/26 09:32:56 http: TLS handshake error from 10.129.0.1:49134: EOF
2018/07/26 09:33:06 http: TLS handshake error from 10.129.0.1:49142: EOF
2018/07/26 09:33:16 http: TLS handshake error from 10.129.0.1:49150: EOF
2018/07/26 09:33:26 http: TLS handshake error from 10.129.0.1:49158: EOF
level=info timestamp=2018-07-26T09:33:29.989750Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:33:36 http: TLS handshake error from 10.129.0.1:49166: EOF
2018/07/26 09:33:46 http: TLS handshake error from 10.129.0.1:49174: EOF
2018/07/26 09:33:56 http: TLS handshake error from 10.129.0.1:49182: EOF
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:33:11.809546Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:33:17 http: TLS handshake error from 10.129.0.1:55462: EOF
level=info timestamp=2018-07-26T09:33:19.157258Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200
contentLength=136 level=info timestamp=2018-07-26T09:33:21.854515Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:33:27 http: TLS handshake error from 10.129.0.1:55470: EOF level=info timestamp=2018-07-26T09:33:31.901988Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:33:37.417702Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:33:37.418656Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:33:37 http: TLS handshake error from 10.129.0.1:55478: EOF level=info timestamp=2018-07-26T09:33:41.940995Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:33:47 http: TLS handshake error from 10.129.0.1:55486: EOF level=info timestamp=2018-07-26T09:33:49.224211Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:33:51.996496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:33:57 http: TLS handshake error from 10.129.0.1:55494: EOF level=info timestamp=2018-07-26T09:34:00.039157Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T09:21:58.361844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:21:58.395554Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:21:58.763376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirm86d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking 
VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:27:59.274034Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:27:59.274190Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:27:59.320745Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:27:59.338025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:27:59.349887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:28:00.337718Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqr64l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2fffeb51-90b6-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:28:00.511208Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:28:00.511343Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:31:00.978493Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:31:00.978616Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=error timestamp=2018-07-26T09:31:18.905260Z pos=vm.go:397 component=virt-handler 
namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:31:18.905348Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9" level=info timestamp=2018-07-26T09:31:20.185678Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:31:20.185890Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:31:20.185965Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9" level=info timestamp=2018-07-26T09:31:22.746255Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:31:22.747037Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:31:22.747549Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9" level=info timestamp=2018-07-26T09:31:27.868132Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:31:27.868337Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:31:27.868431Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv2sh9" level=info timestamp=2018-07-26T09:31:38.108737Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:31:38.131422Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:31:38.131837Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:31:38.132113Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
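The virt-handler entries above show the standard Kubernetes controller retry pattern: each time a sync attempt fails (here with reason "connection is shut down"), the VMI key is put back on the work queue and retried with backoff until the handler finally reports "Synchronization loop succeeded." Below is a minimal sketch of that pattern using client-go's rate-limited workqueue; it is illustrative only, not KubeVirt's actual handler code, and the key name and simulated failure are made up for the example.

package main

import (
	"fmt"

	"k8s.io/client-go/util/workqueue"
)

var attempts int

// syncVMI stands in for the handler's sync loop: it fails twice with the
// same error seen in the log above, then succeeds. Purely a simulation.
func syncVMI(key string) error {
	attempts++
	if attempts < 3 {
		return fmt.Errorf("connection is shut down")
	}
	return nil
}

func main() {
	// A rate-limited queue: each failed key is retried with exponential backoff.
	queue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
	queue.Add("kubevirt-test-default/testvmiv2sh9")

	for {
		key, shutdown := queue.Get() // blocks until an item is ready (or backoff expires)
		if shutdown {
			return
		}
		if err := syncVMI(key.(string)); err != nil {
			fmt.Printf("re-enqueuing VirtualMachineInstance %v: %v\n", key, err)
			queue.AddRateLimited(key) // retry later, backing off per key
		} else {
			fmt.Printf("Synchronization loop succeeded for %v\n", key)
			queue.Forget(key) // success: reset this key's backoff counter
			queue.ShutDown()  // end the sketch
		}
		queue.Done(key)
	}
}

This is why the same VMI appears over and over in the dump: the queue keeps redelivering the key until a sync attempt succeeds, rather than giving up after the first broken connection.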
Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
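The virt-launcher dumps above and below abort with "signal SIGSEGV ... addr=0x0", i.e. a nil-pointer dereference inside LibvirtDomainManager.SyncVMI (manager.go:163). That crash is what kills the launcher with exit code 2, closes its cmd-server RPC socket, and produces the handler's "unexpected EOF" and "connection is shut down" retries seen throughout this log. A minimal Go sketch of that failure class and the guard that avoids it follows; the types and field names are hypothetical stand-ins, not KubeVirt's actual code.

package main

import (
	"fmt"
	"log"
)

// Hypothetical stand-ins for a libvirt connection and domain; KubeVirt's
// real types differ. dom can legitimately be nil if the connection dropped.
type domain struct{ name string }
type connection struct{ dom *domain }
type manager struct{ conn *connection }

// syncUnguarded dereferences conn.dom without a nil check. With dom == nil
// this faults exactly like the trace below: SIGSEGV, code=0x1, addr=0x0.
func (m *manager) syncUnguarded() {
	fmt.Println(m.conn.dom.name)
}

// syncGuarded returns an error the caller can re-enqueue and retry,
// instead of crashing the whole process.
func (m *manager) syncGuarded() error {
	if m.conn == nil || m.conn.dom == nil {
		return fmt.Errorf("no libvirt domain available")
	}
	fmt.Println(m.conn.dom.name)
	return nil
}

func main() {
	m := &manager{conn: &connection{}} // dom left nil on purpose
	if err := m.syncGuarded(); err != nil {
		log.Println("sync failed, retry instead of crashing:", err)
	}
	m.syncUnguarded() // panics: nil pointer dereference [signal SIGSEGV]
}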
Pod name: virt-launcher-testvmiv2sh9-scvfn
Pod phase: Failed
goroutine 11 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200460c0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421932040, 0xc42000e750, 0xc4203fe000, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200e4120, 0xc42000e098, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4202afbc0, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200e4120, 0xc42000e098, 0x13, 0xc42194df18, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202aed40, 0xc4200e64b0, 0xc4204cf770, 0xc4204cf780, 0xc421948300, 0xc421924d40, 0x10f5e80, 0xc42000e750, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [180.462 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model equals to passthrough [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:312
    should report exactly the same model as node CPU
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:313

    Unexpected Warning event received.
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T09:31:01.509777Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmiv2sh9-scvfn"
level=info timestamp=2018-07-26T09:31:17.108083Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmiv2sh9-scvfn"
level=error timestamp=2018-07-26T09:31:17.475880Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:32:06 http: TLS handshake error from 10.129.0.1:49092: EOF
2018/07/26 09:32:16 http: TLS handshake error from 10.129.0.1:49100: EOF
2018/07/26 09:32:26 http: TLS handshake error from 10.129.0.1:49108: EOF
2018/07/26 09:32:36 http: TLS handshake error from 10.129.0.1:49118: EOF
2018/07/26 09:32:46 http: TLS handshake error from 10.129.0.1:49126: EOF
2018/07/26 09:32:56 http: TLS handshake error from 10.129.0.1:49134: EOF
2018/07/26 09:33:06 http: TLS handshake error from 10.129.0.1:49142: EOF
2018/07/26 09:33:16 http: TLS handshake error from 10.129.0.1:49150: EOF
2018/07/26 09:33:26 http: TLS handshake error from 10.129.0.1:49158: EOF
level=info timestamp=2018-07-26T09:33:29.989750Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:33:36 http: TLS handshake error from 10.129.0.1:49166: EOF
2018/07/26 09:33:46 http: TLS handshake error from
10.129.0.1:49174: EOF 2018/07/26 09:33:56 http: TLS handshake error from 10.129.0.1:49182: EOF 2018/07/26 09:34:06 http: TLS handshake error from 10.129.0.1:49190: EOF 2018/07/26 09:34:16 http: TLS handshake error from 10.129.0.1:49198: EOF Pod name: virt-api-7d79764579-g7k67 Pod phase: Running level=info timestamp=2018-07-26T09:33:31.901988Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:33:37.417702Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:33:37.418656Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:33:37 http: TLS handshake error from 10.129.0.1:55478: EOF level=info timestamp=2018-07-26T09:33:41.940995Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:33:47 http: TLS handshake error from 10.129.0.1:55486: EOF level=info timestamp=2018-07-26T09:33:49.224211Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:33:51.996496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:33:57 http: TLS handshake error from 10.129.0.1:55494: EOF level=info timestamp=2018-07-26T09:34:00.039157Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T09:34:02.049328Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:34:07.500095Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:34:07.500767Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:34:07 http: TLS handshake error from 10.129.0.1:55502: EOF level=info timestamp=2018-07-26T09:34:12.089639Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller 
service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:27:59.274034Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:27:59.274190Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:27:59.320745Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:27:59.338025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:27:59.349887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:28:00.337718Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqr64l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2fffeb51-90b6-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:28:00.511208Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:28:00.511343Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:31:00.978493Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:31:00.978616Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:34:01.452528Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" 
level=info timestamp=2018-07-26T09:34:01.452692Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:34:01.606047Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir2jbk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T09:34:17.250999Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:17.568412Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="unexpected EOF" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:17.596977Z pos=vm.go:251 component=virt-handler reason="unexpected EOF" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:17.597153Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:17.597243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:17.597308Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:17.602244Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:17.602380Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:17.602444Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:17.623049Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:17.625356Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T09:34:17.625440Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:17.665879Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:17.666406Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:17.668565Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmir2jbk-2w96h
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 16 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a65d0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421932040, 0xc4200b7cf8, 0xc4202edc60, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200e4180, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4202acdc0, 0x114acc0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200e4180, 0xc42000e078, 0x13, 0xc42005d718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4215c5480)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42034b4c0, 0xc4200e61e0, 0xc4215ce6b0, 0xc4215ce6c0, 0xc421946300, 0xc421926d00, 0x10f5e80, 0xc4200b7cf8, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:35:06 http: TLS handshake error from 10.129.0.1:49238: EOF
2018/07/26 09:35:16 http: TLS handshake error from 10.129.0.1:49246: EOF
2018/07/26 09:35:26 http: TLS handshake error from 10.129.0.1:49254: EOF
level=info timestamp=2018-07-26T09:35:29.977412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:35:36 http: TLS handshake error from 10.129.0.1:49264: EOF
2018/07/26 09:35:46 http: TLS handshake error from 10.129.0.1:49272: EOF
2018/07/26 09:35:56 http: TLS handshake error from 10.129.0.1:49280: EOF
level=info timestamp=2018-07-26T09:36:00.053609Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:36:06 http: TLS handshake error from 10.129.0.1:49288: EOF
2018/07/26 09:36:16 http: TLS handshake error from 10.129.0.1:49296: EOF
2018/07/26 09:36:26 http: TLS handshake error from 10.129.0.1:49304: EOF
level=info timestamp=2018-07-26T09:36:30.040025Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:36:36 http: TLS handshake error from 10.129.0.1:49312: EOF
2018/07/26 09:36:46 http: TLS handshake error from 10.129.0.1:49320: EOF
2018/07/26 09:36:56 http: TLS handshake error from 10.129.0.1:49328: EOF
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:36:12.741343Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:36:17 http: TLS handshake error from 10.129.0.1:55608: EOF
level=info timestamp=2018-07-26T09:36:19.511094Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:36:22.788355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:36:27 http: TLS handshake error from 10.129.0.1:55616: EOF
level=info timestamp=2018-07-26T09:36:32.854153Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:36:37 http: TLS handshake error from 10.129.0.1:55624: EOF
level=info timestamp=2018-07-26T09:36:38.234691Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:36:38.238152Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200
contentLength=136 level=info timestamp=2018-07-26T09:36:42.912761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:36:47 http: TLS handshake error from 10.129.0.1:55632: EOF level=info timestamp=2018-07-26T09:36:49.564180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:36:52.966044Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:36:57 http: TLS handshake error from 10.129.0.1:55640: EOF level=info timestamp=2018-07-26T09:37:00.019356Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T09:24:58.738810Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:24:58.739059Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5wjvx kind= uid=c4628907-90b5-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:27:59.274034Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:27:59.274190Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqr64l kind= uid=2fffeb51-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:27:59.320745Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:27:59.338025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:27:59.349887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:28:00.337718Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqr64l, 
ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2fffeb51-90b6-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l" level=info timestamp=2018-07-26T09:28:00.511208Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:28:00.511343Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:31:00.978493Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:31:00.978616Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:34:01.452528Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T09:34:01.452692Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T09:34:01.606047Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir2jbk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=error timestamp=2018-07-26T09:34:18.872722Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:18.872865Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:20.153157Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:20.153351Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T09:22:27.881416Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:30.441720Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:30.442013Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T09:22:30.442151Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:35.562457Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:22:35.562655Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:22:35.562869Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirm86d" level=info timestamp=2018-07-26T09:22:45.803290Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.819147Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:22:45.821083Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:22:45.821157Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.523687Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.524006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind= uid=58a4a0f3-90b5-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:24:58.565159Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:24:58.565329Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirm86d kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmir2jbk-2w96h Pod phase: Failed goroutine 16 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a65d0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421932040, 0xc4200b7cf8, 0xc4202edc60, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc4200e4180, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4202acdc0, 0x114acc0, ...) 
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200e4180, 0xc42000e078, 0x13, 0xc42005d718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4215c5480)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42034b4c0, 0xc4200e61e0, 0xc4215ce6b0, 0xc4215ce6c0, 0xc421946300, 0xc421926d00, 0x10f5e80, 0xc4200b7cf8, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [180.473 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model not defined [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:336
      should report CPU model from libvirt capabilities
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:337

      Unexpected Warning event received.
      Expected
          : Warning
      not to equal
          : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T09:34:02.049963Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmir2jbk-2w96h"
level=info timestamp=2018-07-26T09:34:17.082725Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmir2jbk-2w96h"
level=error timestamp=2018-07-26T09:34:17.432297Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
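The assertion at tests/utils.go:245 trips because the test's event watcher saw a Warning event ("unexpected EOF") for the VMI while waiting for it to start. A minimal sketch of that kind of check, assuming a client-go clientset; failOnWarningEvent and its parameters are hypothetical names, not the test suite's actual helper:

package eventcheck

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// failOnWarningEvent watches the events for one object and returns an error
// on the first Warning, mirroring the "Expected Warning not to equal Warning"
// assertion shape above.
func failOnWarningEvent(ctx context.Context, client kubernetes.Interface, namespace, name string) error {
	w, err := client.CoreV1().Events(namespace).Watch(ctx, metav1.ListOptions{
		FieldSelector: "involvedObject.name=" + name,
	})
	if err != nil {
		return err
	}
	defer w.Stop()
	for ev := range w.ResultChan() {
		event, ok := ev.Object.(*corev1.Event)
		if !ok {
			continue
		}
		if event.Type == corev1.EventTypeWarning {
			return fmt.Errorf("unexpected warning event received: %s", event.Message)
		}
	}
	return nil
}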
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running

2018/07/26 09:35:26 http: TLS handshake error from 10.129.0.1:49254: EOF
level=info timestamp=2018-07-26T09:35:29.977412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:35:36 http: TLS handshake error from 10.129.0.1:49264: EOF
2018/07/26 09:35:46 http: TLS handshake error from 10.129.0.1:49272: EOF
2018/07/26 09:35:56 http: TLS handshake error from 10.129.0.1:49280: EOF
level=info timestamp=2018-07-26T09:36:00.053609Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:36:06 http: TLS handshake error from 10.129.0.1:49288: EOF
2018/07/26 09:36:16 http: TLS handshake error from 10.129.0.1:49296: EOF
2018/07/26 09:36:26 http: TLS handshake error from 10.129.0.1:49304: EOF
level=info timestamp=2018-07-26T09:36:30.040025Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:36:36 http: TLS handshake error from 10.129.0.1:49312: EOF
2018/07/26 09:36:46 http: TLS handshake error from 10.129.0.1:49320: EOF
2018/07/26 09:36:56 http: TLS handshake error from 10.129.0.1:49328: EOF
2018/07/26 09:37:06 http: TLS handshake error from 10.129.0.1:49336: EOF
2018/07/26 09:37:16 http: TLS handshake error from 10.129.0.1:49344: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running

2018/07/26 09:36:37 http: TLS handshake error from 10.129.0.1:55624: EOF
level=info timestamp=2018-07-26T09:36:38.234691Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:36:38.238152Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:36:42.912761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:36:47 http: TLS handshake error from 10.129.0.1:55632: EOF
level=info timestamp=2018-07-26T09:36:49.564180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:36:52.966044Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:36:57 http: TLS handshake error from 10.129.0.1:55640: EOF
level=info timestamp=2018-07-26T09:37:00.019356Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:37:03.019045Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:37:07 http: TLS handshake error from 10.129.0.1:55648: EOF
level=info timestamp=2018-07-26T09:37:08.315532Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:37:08.317515Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:37:13.064333Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:37:17 http: TLS handshake error from 10.129.0.1:55656: EOF

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running

level=info timestamp=2018-07-26T09:27:59.320745Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.338025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:27:59.349887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.337718Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqr64l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqr64l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2fffeb51-90b6-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqr64l"
level=info timestamp=2018-07-26T09:28:00.511208Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:28:00.511343Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwgrmw kind= uid=30bc43b3-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:31:00.978493Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:31:00.978616Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv2sh9 kind= uid=9c4d9653-90b6-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:34:01.452528Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:34:01.452692Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:34:01.606047Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir2jbk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:37:01.919865Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:37:01.920028Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:37:01.989285Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi58d29\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:02.006268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi58d29\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
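The "Operation cannot be fulfilled ... the object has been modified" lines are ordinary optimistic-concurrency conflicts on resourceVersion; the controller handles them by re-enqueuing the key. For a client that wants to update in place instead, client-go ships a standard retry helper. A sketch of that pattern; bumpAnnotation is a hypothetical helper, and a ConfigMap stands in for the VMI since the pattern is identical for any resource with a resourceVersion:

package conflictretry

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/util/retry"
)

// bumpAnnotation re-reads the latest version of the object and re-applies the
// change until the update lands, which is the standard answer to
// "the object has been modified; please apply your changes to the latest
// version and try again".
func bumpAnnotation(ctx context.Context, client kubernetes.Interface, namespace, name string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		cm, err := client.CoreV1().ConfigMaps(namespace).Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		if cm.Annotations == nil {
			cm.Annotations = map[string]string{}
		}
		cm.Annotations["example/touched"] = "true"
		_, err = client.CoreV1().ConfigMaps(namespace).Update(ctx, cm, metav1.UpdateOptions{})
		return err // RetryOnConflict retries only when this is a Conflict error
	})
}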
Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running

level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running

level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running

level=info timestamp=2018-07-26T09:37:19.602330Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:19.602741Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:19.602894Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:19.607421Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:19.607685Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:19.608053Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:19.628486Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:19.628713Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:19.629133Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:19.669698Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:19.669887Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:19.670003Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:19.750957Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:19.751161Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:19.751238Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"

Pod name: virt-launcher-testvmi58d29-6snrb
Pod phase: Running

goroutine 8 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027a910, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421922ca0, 0xc42000e948, 0xc420393ba0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096ea0, 0xc4200b6938, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc4200bba40, 0x12, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096ea0, 0xc4200b6938, 0x13, 0xc4206e1f18, 0x3, 0x3, 0x1, 0x4, 0xc420242e80)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4200bab80, 0xc4200b8e10, 0xc421a970c0, 0xc421a970d0, 0xc421925d80, 0xc4219300a0, 0x10f5e80, 0xc42000e948, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
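This recurring trace is virt-launcher's command server: net/rpc reflects over the exported Launcher methods and invokes Sync from a goroutine started in (*Server).ServeCodec. net/rpc does not recover panics in service methods, so a panic inside the real SyncVMI (manager.go:163) unwinds through exactly these frames and kills the process with exit status 2. A minimal sketch of the same dispatch shape; Args, Reply and Serve are hypothetical stand-ins, not KubeVirt's actual types:

package cmdserver

import (
	"net"
	"net/rpc"
)

// Args and Reply only need exported fields; net/rpc requires the method
// shape func (t *T) Method(args *A, reply *R) error.
type Args struct{ DomainXML string }
type Reply struct{ Success bool }

type Launcher struct{}

// Sync matches the net/rpc method contract seen in the stack above. If the
// body panics, net/rpc does not recover it: the panic propagates through
// reflect.Value.Call and net/rpc.(*service).call and crashes the server.
func (l *Launcher) Sync(args *Args, reply *Reply) error {
	reply.Success = true
	return nil
}

// Serve registers the receiver and serves connections until the listener
// closes, spawning one goroutine per incoming call.
func Serve(ln net.Listener) error {
	srv := rpc.NewServer()
	if err := srv.Register(&Launcher{}); err != nil {
		return err
	}
	srv.Accept(ln)
	return nil
}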
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running

2018/07/26 09:38:06 http: TLS handshake error from 10.129.0.1:49384: EOF
2018/07/26 09:38:16 http: TLS handshake error from 10.129.0.1:49392: EOF
2018/07/26 09:38:26 http: TLS handshake error from 10.129.0.1:49400: EOF
level=info timestamp=2018-07-26T09:38:29.997439Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:38:36 http: TLS handshake error from 10.129.0.1:49410: EOF
2018/07/26 09:38:46 http: TLS handshake error from 10.129.0.1:49418: EOF
2018/07/26 09:38:56 http: TLS handshake error from 10.129.0.1:49426: EOF
level=info timestamp=2018-07-26T09:39:00.069703Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:39:06 http: TLS handshake error from 10.129.0.1:49434: EOF
2018/07/26 09:39:16 http: TLS handshake error from 10.129.0.1:49442: EOF
2018/07/26 09:39:26 http: TLS handshake error from 10.129.0.1:49450: EOF
2018/07/26 09:39:36 http: TLS handshake error from 10.129.0.1:49458: EOF
2018/07/26 09:39:46 http: TLS handshake error from 10.129.0.1:49466: EOF
2018/07/26 09:39:56 http: TLS handshake error from 10.129.0.1:49474: EOF
level=info timestamp=2018-07-26T09:40:00.069582Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running

level=info timestamp=2018-07-26T09:39:19.909433Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:39:23.726278Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:39:27 http: TLS handshake error from 10.129.0.1:55762: EOF
level=info timestamp=2018-07-26T09:39:30.022690Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:39:33.881283Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:39:37 http: TLS handshake error from 10.129.0.1:55770: EOF
level=info timestamp=2018-07-26T09:39:38.872966Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:39:38.877276Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:39:43.940266Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:39:44.261441Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:39:44.264715Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 09:39:47 http: TLS handshake error from 10.129.0.1:55778: EOF
level=info timestamp=2018-07-26T09:39:49.972016Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:39:53.998536Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:39:57 http: TLS handshake error from 10.129.0.1:55786: EOF
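The steady "http: TLS handshake error from 10.129.0.1:NNNNN: EOF" lines every ten seconds are consistent with a TCP health probe opening virt-api's HTTPS port and closing it without ever sending a ClientHello; with Go's default server error logging that produces exactly this message. A standalone reproduction sketch under that assumption:

package main

import (
	"fmt"
	"net"
	"net/http"
	"net/http/httptest"
	"strings"
	"time"
)

// A plain-TCP connect-and-close against a Go TLS server makes the server's
// handshake read hit EOF, which http.Server reports as
// "http: TLS handshake error from <addr>: EOF" on its error logger.
func main() {
	ts := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	}))
	defer ts.Close()

	addr := strings.TrimPrefix(ts.URL, "https://")
	conn, err := net.Dial("tcp", addr) // no TLS handshake is ever attempted
	if err != nil {
		panic(err)
	}
	conn.Close() // the server side sees EOF mid-handshake and logs it

	time.Sleep(100 * time.Millisecond) // give the server goroutine time to log
}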
Pod name: virt-handler-qsw9k
Pod phase: Running

level=error timestamp=2018-07-26T09:37:20.873227Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:20.873303Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:22.153688Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:22.153990Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:22.154071Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:24.714359Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:24.714677Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:24.714776Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:29.835226Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:37:29.835459Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:37:29.835572Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi58d29"
level=info timestamp=2018-07-26T09:37:40.076047Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:37:40.100991Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:40.101986Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:37:40.102042Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi58d29 kind= uid=73710eb5-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
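The virt-handler "Synchronizing ... failed" / "re-enqueuing" pairs (vm.go:397 / vm.go:251) are the standard controller loop: on error the key goes back on a rate-limited workqueue and is retried with backoff until the sync finally reports "Synchronization loop succeeded." A sketch of that loop with client-go's workqueue; processNextItem and syncVMI are hypothetical stand-ins for the real per-VMI sync:

package handler

import (
	"log"

	"k8s.io/client-go/util/workqueue"
)

// processNextItem mirrors the re-enqueue pattern in the logs above: a failed
// sync is put back on the rate-limited queue (retried with growing backoff),
// and a successful one clears the key's retry history.
func processNextItem(queue workqueue.RateLimitingInterface, syncVMI func(key string) error) bool {
	key, quit := queue.Get()
	if quit {
		return false
	}
	defer queue.Done(key)

	if err := syncVMI(key.(string)); err != nil {
		log.Printf("re-enqueuing VirtualMachineInstance %v: %v", key, err)
		queue.AddRateLimited(key) // retried later with increasing backoff
		return true
	}
	queue.Forget(key) // success: reset the backoff counter for this key
	return true
}

func newQueue() workqueue.RateLimitingInterface {
	return workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
}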
Pod name: virt-launcher-testvmi58d29-6snrb
Pod phase: Failed

virt-launcher exited with code 2

• Failure [180.463 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  New VirtualMachineInstance with all supported drives
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:357
    should have all the device nodes [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:380

    Unexpected Warning event received.
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T09:37:02.403593Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmi58d29-6snrb"
level=info timestamp=2018-07-26T09:37:19.673226Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmi58d29-6snrb"
level=error timestamp=2018-07-26T09:37:20.029407Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
••
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running

2018/07/26 09:43:16 http: TLS handshake error from 10.129.0.1:49636: EOF
2018/07/26 09:43:26 http: TLS handshake error from 10.129.0.1:49644: EOF
level=info timestamp=2018-07-26T09:43:30.003565Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:43:36 http: TLS handshake error from 10.129.0.1:49652: EOF
2018/07/26 09:43:46 http: TLS handshake error from 10.129.0.1:49660: EOF
2018/07/26 09:43:56 http: TLS handshake error from 10.129.0.1:49668: EOF
2018/07/26 09:44:06 http: TLS handshake error from 10.129.0.1:49676: EOF
2018/07/26 09:44:16 http: TLS handshake error from 10.129.0.1:49684: EOF
2018/07/26 09:44:26 http: TLS handshake error from 10.129.0.1:49692: EOF
level=info timestamp=2018-07-26T09:44:28.897825Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:44:30.108788Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:44:36 http: TLS handshake error from 10.129.0.1:49700: EOF
2018/07/26 09:44:46 http: TLS handshake error from 10.129.0.1:49710: EOF
2018/07/26 09:44:56 http: TLS handshake error from 10.129.0.1:49718: EOF
level=info timestamp=2018-07-26T09:45:00.025178Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running

level=info timestamp=2018-07-26T09:44:28.913861Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:44:35.512554Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:44:37.182567Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:44:37.196366Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:44:37.208974Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:44:37 http: TLS handshake error from 10.129.0.1:56014: EOF
level=info timestamp=2018-07-26T09:44:39.828812Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:44:39.829939Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:44:44.004285Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:44:44.005373Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:44:45.595639Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:44:47 http: TLS handshake error from 10.129.0.1:56022: EOF
level=info timestamp=2018-07-26T09:44:50.500342Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:44:55.646542Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:44:57 http: TLS handshake error from 10.129.0.1:56030: EOF

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running

level=info timestamp=2018-07-26T09:44:46.179213Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T09:44:46.179250Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T09:44:46.196174Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:44:46.196244Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:44:46.210528Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:44:46.210594Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:44:46.227353Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:44:46.227403Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:44:46.247767Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:44:46.247845Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:44:46.249729Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2t4jl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2t4jl"
level=info timestamp=2018-07-26T09:44:59.849419Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:44:59.849503Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:45:00.106892Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:45:00.107023Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=df1274e3-90b7-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
Pod name: virt-handler-qsw9k
Pod phase: Running

level=info timestamp=2018-07-26T09:45:00.184980Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:45:00.185211Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:45:00.185289Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2t4jl"
level=info timestamp=2018-07-26T09:45:00.265763Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:45:00.265975Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:45:00.266050Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2t4jl"
level=info timestamp=2018-07-26T09:45:00.426405Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:45:00.426624Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:45:00.426699Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2t4jl"
level=info timestamp=2018-07-26T09:45:00.747009Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:45:00.747195Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:45:00.747316Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2t4jl"
level=info timestamp=2018-07-26T09:45:01.387684Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:45:01.387891Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2t4jl kind= uid=8828ad86-90b8-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:45:01.388063Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2t4jl"

Pod name: virt-launcher-testvmi2t4jl-n5psw
Pod phase: Running

goroutine 9 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4204ea370, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42190cce0, 0xc4200b7ff8, 0xc4203ffb20, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096fc0, 0xc4200b6670, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc4200bacc0, 0x114acc0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096fc0, 0xc4200b6670, 0x13, 0xc421aa5f18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc420657300)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4200bad00, 0xc4200b8af0, 0xc420232cc0, 0xc420232cd0, 0xc42190fd80, 0xc42191a0a0, 0x10f5e80, 0xc4200b7ff8, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
------------------------------
• Failure [300.275 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should update VirtualMachine once VMIs are up [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195

    Timed out after 300.000s.
    Expected
        : false
    to be true

    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:201
------------------------------
• [SLOW TEST:12.277 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should remove VirtualMachineInstance once the VMI is marked for deletion
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:204
------------------------------
•
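The 300-second failure ("Timed out after 300.000s. Expected false to be true") has the shape of a Gomega Eventually polling a boolean condition until timeout; vm_test.go:201 is evidently waiting for the VM status to report its VMI up while virt-launcher keeps crashing. A minimal sketch of that assertion shape, to run inside a Ginkgo spec; waitForVMReady and vmIsReady are hypothetical stand-ins for the suite's real condition:

package tests

import (
	"time"

	. "github.com/onsi/gomega"
)

// waitForVMReady polls a boolean condition once a second for five minutes;
// if it never returns true, Gomega fails with exactly the
// "Expected false to be true" message seen above.
func waitForVMReady(vmIsReady func() bool) {
	Eventually(vmIsReady, 300*time.Second, 1*time.Second).Should(BeTrue())
}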
pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:49:57 http: TLS handshake error from 10.129.0.1:56272: EOF level=info timestamp=2018-07-26T09:50:00.017242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T09:50:07.257285Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 09:50:07 http: TLS handshake error from 10.129.0.1:56280: EOF level=info timestamp=2018-07-26T09:50:10.976976Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:50:10.977689Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T09:50:17.295629Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T09:50:02.373057Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T09:50:02.401387Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T09:50:02.401431Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=error timestamp=2018-07-26T09:50:02.409158Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmi4zdwv\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachine status failed." 
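The recurring "http: TLS handshake error from ...: EOF" lines are what Go's net/http server logs whenever a peer opens a TCP connection and hangs up before completing the TLS handshake; given the ~10s cadence from the same source IP they are plausibly kubelet TCP probes or similar health checks, though the log itself does not say. A minimal sketch that reproduces the message (it assumes a tls.crt/tls.key pair exists in the working directory):

package main

import (
    "log"
    "net"
    "net/http"
    "time"
)

func main() {
    // Assumption: tls.crt/tls.key exist (e.g. a self-signed pair).
    srv := &http.Server{Addr: "127.0.0.1:8443"}
    go func() { log.Println(srv.ListenAndServeTLS("tls.crt", "tls.key")) }()
    time.Sleep(500 * time.Millisecond)

    // Behave like a TCP-only health probe: connect, then hang up without
    // ever starting the TLS handshake.
    if conn, err := net.Dial("tcp", "127.0.0.1:8443"); err == nil {
        conn.Close()
    }
    // Server log: http: TLS handshake error from 127.0.0.1:NNNNN: EOF
    time.Sleep(500 * time.Millisecond)
}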
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:50:02.373057Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:50:02.401387Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:50:02.401431Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=error timestamp=2018-07-26T09:50:02.409158Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmi4zdwv\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachine status failed."
level=info timestamp=2018-07-26T09:50:02.409240Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmi4zdwv\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachine kubevirt-test-default/testvmi4zdwv"
level=info timestamp=2018-07-26T09:50:02.409288Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:50:02.409344Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:50:02.414431Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:50:02.414494Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:50:02.432305Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:50:02.432363Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:50:02.439621Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4zdwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4zdwv"
level=info timestamp=2018-07-26T09:50:02.456011Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:50:02.456131Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=9ab66d02-90b8-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:50:02.481885Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4zdwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4zdwv"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
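The "Operation cannot be fulfilled ... the object has been modified" errors above are ordinary optimistic-concurrency conflicts: the controller wrote a status update using a stale resourceVersion and the apiserver rejected it with a 409. Re-enqueuing, as the controller does here, is one standard answer; the other is client-go's retry helper. A minimal sketch with a Pod standing in for a VirtualMachine (the function, namespace, and label are illustrative, not KubeVirt code):

package vmstatus

import (
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    "k8s.io/client-go/kubernetes"
    "k8s.io/client-go/util/retry"
)

// markReady retries the read-modify-write whenever the apiserver answers
// with a Conflict ("the object has been modified ...").
func markReady(client kubernetes.Interface, ns, name string) error {
    return retry.RetryOnConflict(retry.DefaultRetry, func() error {
        // GET a fresh copy so the resourceVersion we write is current.
        pod, err := client.CoreV1().Pods(ns).Get(name, metav1.GetOptions{})
        if err != nil {
            return err
        }
        if pod.Labels == nil {
            pod.Labels = map[string]string{}
        }
        pod.Labels["ready"] = "true" // the mutation to apply
        _, err = client.CoreV1().Pods(ns).Update(pod)
        return err // a Conflict here makes RetryOnConflict try again
    })
}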
Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T09:49:44.340391Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4zdwv"
level=info timestamp=2018-07-26T09:49:46.900674Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:49:46.900859Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:49:46.900975Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4zdwv"
level=info timestamp=2018-07-26T09:49:52.021284Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:49:52.021505Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:49:52.021618Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4zdwv"
level=info timestamp=2018-07-26T09:50:02.262030Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:50:02.282631Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:50:02.282760Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:50:02.282805Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:50:02.293804Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:50:02.294098Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind= uid=2faacc4c-90b9-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:50:02.333580Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:50:02.333696Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4zdwv kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
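The "re-enqueuing VirtualMachineInstance" lines reflect the usual controller pattern: a failed sync puts the object's key back on a rate-limited workqueue so it is retried with backoff rather than dropped. A minimal sketch with placeholder key and sync logic:

package main

import (
    "fmt"

    "k8s.io/client-go/util/workqueue"
)

// sync is a placeholder for the real reconcile; imagine it failing with
// "connection is shut down" while the launcher is gone.
func sync(key string) error { return fmt.Errorf("connection is shut down") }

func main() {
    queue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
    queue.Add("kubevirt-test-default/testvmi2t4jl")

    for i := 0; i < 3; i++ { // bounded only to keep the demo finite
        key, _ := queue.Get()
        if err := sync(key.(string)); err != nil {
            fmt.Printf("re-enqueuing VirtualMachineInstance %s: %v\n", key, err)
            queue.AddRateLimited(key) // retry later, with growing backoff
        } else {
            queue.Forget(key) // success resets the backoff counter
        }
        queue.Done(key)
    }
}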
Pod name: virt-launcher-testvmi4zdwv-cmj9m
Pod phase: Running
level=info timestamp=2018-07-26T09:50:05.790659Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T09:50:05.790918Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T09:50:05.793356Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-26T09:50:15.804056Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-26T09:50:15.854707Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4zdwv"
level=info timestamp=2018-07-26T09:50:15.859322Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-26T09:50:15.859490Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
------------------------------
• Failure [300.461 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should recreate VirtualMachineInstance if it gets deleted [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245

    Timed out after 300.000s.
    Expected
        : false
    to be true
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157
------------------------------
STEP: Starting the VirtualMachineInstance
STEP: VMI has the running condition

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:50:16 http: TLS handshake error from 10.129.0.1:49976: EOF
2018/07/26 09:50:26 http: TLS handshake error from 10.129.0.1:49984: EOF
level=info timestamp=2018-07-26T09:50:30.095209Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:50:36 http: TLS handshake error from 10.129.0.1:49992: EOF
2018/07/26 09:50:46 http: TLS handshake error from 10.129.0.1:50002: EOF
2018/07/26 09:50:56 http: TLS handshake error from 10.129.0.1:50010: EOF
2018/07/26 09:51:06 http: TLS handshake error from 10.129.0.1:50018: EOF
2018/07/26 09:51:16 http: TLS handshake error from 10.129.0.1:50026: EOF
2018/07/26 09:51:26 http: TLS handshake error from 10.129.0.1:50034: EOF
level=info timestamp=2018-07-26T09:51:29.997324Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:51:36 http: TLS handshake error from 10.129.0.1:50042: EOF
2018/07/26 09:51:46 http: TLS handshake error from 10.129.0.1:50050: EOF
2018/07/26 09:51:56 http: TLS handshake error from 10.129.0.1:50058: EOF
2018/07/26 09:52:06 http: TLS handshake error from 10.129.0.1:50066: EOF
2018/07/26 09:52:16 http: TLS handshake error from 10.129.0.1:50074: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:51:37.628713Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:51:37 http: TLS handshake error from 10.129.0.1:56354: EOF
level=info timestamp=2018-07-26T09:51:41.351635Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:51:41.357595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:51:47.667816Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:51:47 http: TLS handshake error from 10.129.0.1:56362: EOF
level=info timestamp=2018-07-26T09:51:51.546836Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:51:57.706682Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:51:57 http: TLS handshake error from 10.129.0.1:56370: EOF
level=info timestamp=2018-07-26T09:51:59.992106Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T09:52:07.752971Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:52:07 http: TLS handshake error from 10.129.0.1:56378: EOF
level=info timestamp=2018-07-26T09:52:11.432694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:52:11.435246Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:52:17.797891Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T09:52:05.891453Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:52:05.891542Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:52:05.905252Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:52:05.905299Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=error timestamp=2018-07-26T09:52:05.916415Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmigzpl5\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachine status failed."
level=info timestamp=2018-07-26T09:52:05.916502Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmigzpl5\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachine kubevirt-test-default/testvmigzpl5"
level=info timestamp=2018-07-26T09:52:05.916543Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:52:05.916611Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:52:05.921699Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:52:05.921761Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:52:05.935402Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:52:05.935469Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:52:05.941631Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigzpl5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigzpl5"
level=info timestamp=2018-07-26T09:52:05.953975Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:52:05.954054Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=4dcdde30-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
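The "Watchdog file created at /var/run/kubevirt/watchdog-files/..." line in the launcher log above points at a simple liveness scheme: the launcher keeps touching a well-known file, and a watcher treats a stale modification time as a dead launcher. A minimal sketch of that idea; the path and intervals are illustrative, not KubeVirt's actual values:

package main

import (
    "fmt"
    "os"
    "time"
)

const watchdogFile = "/tmp/demo-watchdog" // stand-in path

func heartbeat(interval time.Duration) {
    for {
        now := time.Now()
        _ = os.Chtimes(watchdogFile, now, now) // "touch" the file
        time.Sleep(interval)
    }
}

func main() {
    f, err := os.Create(watchdogFile)
    if err != nil {
        panic(err)
    }
    f.Close()
    go heartbeat(1 * time.Second)

    // The watching side: a stale mtime means the peer is presumed dead.
    for i := 0; i < 3; i++ {
        time.Sleep(2 * time.Second)
        info, err := os.Stat(watchdogFile)
        alive := err == nil && time.Since(info.ModTime()) < 3*time.Second
        fmt.Println("launcher alive:", alive)
    }
}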
Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T09:51:47.866867Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigzpl5"
level=info timestamp=2018-07-26T09:51:50.427312Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:51:50.428302Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:51:50.428643Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigzpl5"
level=info timestamp=2018-07-26T09:51:55.549226Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:51:55.549433Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:51:55.549515Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigzpl5"
level=info timestamp=2018-07-26T09:52:05.789850Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:52:05.805752Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:52:05.807060Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:52:05.807122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:52:05.823377Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:52:05.823586Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind= uid=78c4501e-90b9-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:52:05.869728Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:52:05.869882Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigzpl5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
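The "No update processing required" / "Synchronization loop succeeded." pairs show the sync loop is idempotent: it diffs desired state against observed state and does nothing when they already match, so redundant wake-ups are harmless. A minimal sketch with an invented one-field state:

package main

import "fmt"

type state struct{ Running bool }

func reconcile(desired, observed state) {
    if desired == observed {
        fmt.Println(`msg="No update processing required"`)
    } else {
        // placeholder for the real work (start/stop the domain, etc.)
        fmt.Printf("msg=\"Processing vmi update\" %+v -> %+v\n", observed, desired)
    }
    fmt.Println(`msg="Synchronization loop succeeded."`)
}

func main() {
    reconcile(state{Running: true}, state{Running: true})  // no-op path
    reconcile(state{Running: true}, state{Running: false}) // update path
}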
Pod name: virt-launcher-testvmigzpl5-2wlhs
Pod phase: Running
level=info timestamp=2018-07-26T09:52:09.668543Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T09:52:09.669653Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T09:52:09.672238Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
• Failure [120.439 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265

    Timed out after 120.000s.
    Expected success, but got an error:
        <*errors.errorString | 0xc42049ce90>: {
            s: "vmi still isn't running",
        }
        vmi still isn't running
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:284
------------------------------
STEP: Creating a new VMI
STEP: Waiting for the VMI's VirtualMachineInstance to start

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 09:55:26 http: TLS handshake error from 10.129.0.1:50228: EOF
level=info timestamp=2018-07-26T09:55:30.103525Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:55:36 http: TLS handshake error from 10.129.0.1:50236: EOF
2018/07/26 09:55:46 http: TLS handshake error from 10.129.0.1:50244: EOF
2018/07/26 09:55:56 http: TLS handshake error from 10.129.0.1:50252: EOF
level=info timestamp=2018-07-26T09:56:00.054028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:56:06 http: TLS handshake error from 10.129.0.1:50260: EOF
2018/07/26 09:56:16 http: TLS handshake error from 10.129.0.1:50268: EOF
2018/07/26 09:56:26 http: TLS handshake error from 10.129.0.1:50276: EOF
level=info timestamp=2018-07-26T09:56:30.052359Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:56:36 http: TLS handshake error from 10.129.0.1:50284: EOF
2018/07/26 09:56:46 http: TLS handshake error from 10.129.0.1:50294: EOF
2018/07/26 09:56:56 http: TLS handshake error from 10.129.0.1:50302: EOF
2018/07/26 09:57:06 http: TLS handshake error from 10.129.0.1:50310: EOF
2018/07/26 09:57:16 http: TLS handshake error from 10.129.0.1:50318: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T09:56:39.562164Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:56:39.578525Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:56:42.290354Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:56:42.290674Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:56:47 http: TLS handshake error from 10.129.0.1:56606: EOF
level=info timestamp=2018-07-26T09:56:49.175004Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:56:52.137708Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:56:57 http: TLS handshake error from 10.129.0.1:56614: EOF
level=info timestamp=2018-07-26T09:56:59.221861Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:57:00.013715Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 09:57:07 http: TLS handshake error from 10.129.0.1:56622: EOF
level=info timestamp=2018-07-26T09:57:09.268275Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:57:12.387525Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T09:57:12.388273Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 09:57:17 http: TLS handshake error from 10.129.0.1:56630: EOF
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=error timestamp=2018-07-26T09:57:04.022447Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmi2hhsz\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachine status failed."
level=info timestamp=2018-07-26T09:57:04.022490Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmi2hhsz\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachine kubevirt-test-default/testvmi2hhsz"
level=info timestamp=2018-07-26T09:57:04.022522Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:57:04.022550Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:57:04.027636Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:57:04.027726Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:57:04.046083Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:57:04.046800Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:57:04.050723Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2hhsz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2hhsz"
level=info timestamp=2018-07-26T09:57:04.067605Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:57:04.067684Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:57:18.352556Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:57:18.352645Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T09:57:18.605720Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T09:57:18.605804Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=9597872a-90b9-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T09:57:18.631034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:57:18.631128Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:57:18.631197Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2hhsz"
level=info timestamp=2018-07-26T09:57:18.671408Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:57:18.671562Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:57:18.671691Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2hhsz"
level=info timestamp=2018-07-26T09:57:18.752023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:57:18.752330Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:57:18.752501Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2hhsz"
level=info timestamp=2018-07-26T09:57:18.912825Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:57:18.913093Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:57:18.913338Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2hhsz"
level=info timestamp=2018-07-26T09:57:19.233683Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:57:19.234011Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2hhsz kind= uid=3fee31ff-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:57:19.234104Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2hhsz"

Pod name: virt-launcher-testvmi2hhsz-w6jz4
Pod phase: Running

goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202ab460, 0xc42018cc80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42192acc0, 0xc4219be360, 0xc4203148c0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096d80, 0xc4200d66c0, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc420089b40, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096d80, 0xc4200d66c0, 0x13, 0xc4219b3718, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420282000, 0xc4200d4a50, 0xc421730210, 0xc421730220, 0xc42192de00, 0xc4219360a0, 0x10f5e80, 0xc4219be360, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
• Failure [301.465 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should stop VirtualMachineInstance if running set to false [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325

    Timed out after 300.000s.
    Expected
        : false
    to be true
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157
------------------------------
STEP: Starting the VirtualMachineInstance
STEP: VMI has the running condition

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:00:06 http: TLS handshake error from 10.129.0.1:50456: EOF
2018/07/26 10:00:16 http: TLS handshake error from 10.129.0.1:50464: EOF
2018/07/26 10:00:26 http: TLS handshake error from 10.129.0.1:50472: EOF
2018/07/26 10:00:36 http: TLS handshake error from 10.129.0.1:50480: EOF
2018/07/26 10:00:46 http: TLS handshake error from 10.129.0.1:50488: EOF
2018/07/26 10:00:56 http: TLS handshake error from 10.129.0.1:50496: EOF
level=info timestamp=2018-07-26T10:01:00.015172Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:01:06 http: TLS handshake error from 10.129.0.1:50504: EOF
2018/07/26 10:01:16 http: TLS handshake error from 10.129.0.1:50512: EOF
2018/07/26 10:01:26 http: TLS handshake error from 10.129.0.1:50520: EOF
2018/07/26 10:01:36 http: TLS handshake error from 10.129.0.1:50528: EOF
2018/07/26 10:01:46 http: TLS handshake error from 10.129.0.1:50536: EOF
2018/07/26 10:01:56 http: TLS handshake error from 10.129.0.1:50544: EOF
2018/07/26 10:02:06 http: TLS handshake error from 10.129.0.1:50552: EOF
2018/07/26 10:02:16 http: TLS handshake error from 10.129.0.1:50560: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
2018/07/26 10:01:37 http: TLS handshake error from 10.129.0.1:56840: EOF
level=info timestamp=2018-07-26T10:01:40.671694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:01:43.383369Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:01:43.383659Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:01:47 http: TLS handshake error from 10.129.0.1:56848: EOF
level=info timestamp=2018-07-26T10:01:50.720042Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:01:52.683727Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:01:57 http: TLS handshake error from 10.129.0.1:56856: EOF
level=info timestamp=2018-07-26T10:02:00.024681Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:02:00.760469Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:02:07 http: TLS handshake error from 10.129.0.1:56864: EOF
level=info timestamp=2018-07-26T10:02:10.800163Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:02:13.479900Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:02:13.481234Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:02:17 http: TLS handshake error from 10.129.0.1:56872: EOF

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=error timestamp=2018-07-26T10:02:03.165534Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmi7j2sv\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachine status failed."
level=info timestamp=2018-07-26T10:02:03.165609Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmi7j2sv\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachine kubevirt-test-default/testvmi7j2sv"
level=info timestamp=2018-07-26T10:02:03.165668Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:02:03.165704Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:02:03.166377Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:02:03.166435Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:02:03.170798Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:02:03.170932Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:02:03.182421Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:02:03.182524Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:02:03.196326Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7j2sv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7j2sv"
level=info timestamp=2018-07-26T10:02:17.856802Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:02:17.856896Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:02:18.096851Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:02:18.097417Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=49474915-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:02:18.243604Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:02:18.243788Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:02:18.243880Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7j2sv" level=info timestamp=2018-07-26T10:02:18.404210Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:02:18.404424Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:02:18.404510Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7j2sv" level=info timestamp=2018-07-26T10:02:18.724830Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:02:18.725134Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:02:18.725263Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7j2sv" level=info timestamp=2018-07-26T10:02:19.365680Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:02:19.365883Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:02:19.366018Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7j2sv" level=info timestamp=2018-07-26T10:02:20.646360Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:02:20.646564Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi7j2sv kind= uid=f23b300d-90ba-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
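The virt-handler entries above show its retry loop: every sync that fails with "connection is shut down" puts the VMI key back on a rate-limited work queue and is retried with backoff. A minimal sketch of that pattern using client-go's workqueue package; the processKey function and the hard-coded key are stand-ins for illustration, not KubeVirt's actual code.

package main

import (
	"fmt"

	"k8s.io/client-go/util/workqueue"
)

// processKey stands in for virt-handler's per-VMI sync, which keeps
// failing while the connection to virt-launcher is down.
// (Hypothetical helper, not the real KubeVirt function.)
func processKey(key string) error {
	return fmt.Errorf("connection is shut down")
}

func main() {
	// Exponential per-key backoff; KubeVirt tunes its own rate limiter.
	queue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
	queue.Add("kubevirt-test-default/testvmir2jbk")

	for i := 0; i < 3; i++ {
		key, shutdown := queue.Get()
		if shutdown {
			return
		}
		if err := processKey(key.(string)); err != nil {
			fmt.Printf("re-enqueuing VirtualMachineInstance %v: %v\n", key, err)
			queue.AddRateLimited(key) // retry later, with growing delay
		} else {
			queue.Forget(key) // success: reset this key's backoff
		}
		queue.Done(key)
	}
}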
level=info timestamp=2018-07-26T10:02:20.646684Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7j2sv"

Pod name: virt-launcher-testvmi7j2sv-dx8j5
Pod phase: Running

goroutine 11 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027a3e0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42192cce0, 0xc42000e238, 0xc4200c4040, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096e40, 0xc4200b6250, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc420088040, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096e40, 0xc4200b6250, 0x13, 0xc421adbf18, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202ae4c0, 0xc4200b8aa0, 0xc4219f1e90, 0xc4219f1ea0, 0xc42192fe00, 0xc4219926e0, 0x10f5e80, 0xc42000e238, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure [301.476 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should start and stop VirtualMachineInstance multiple times [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333

    Timed out after 300.000s.
    Expected
      : false
    to be true
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157
------------------------------
STEP: Doing run: 0
STEP: Starting the VirtualMachineInstance
STEP: VMI has the running condition

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:06:16 http: TLS handshake error from 10.129.0.1:50756: EOF
2018/07/26 10:06:26 http: TLS handshake error from 10.129.0.1:50764: EOF
level=info timestamp=2018-07-26T10:06:29.996849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:06:36 http: TLS handshake error from 10.129.0.1:50772: EOF
2018/07/26 10:06:46 http: TLS handshake error from 10.129.0.1:50780: EOF
2018/07/26 10:06:56 http: TLS handshake error from 10.129.0.1:50788: EOF
level=info timestamp=2018-07-26T10:07:00.084075Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:07:06 http: TLS handshake error from 10.129.0.1:50796: EOF
2018/07/26 10:07:16 http: TLS handshake error from 10.129.0.1:50804: EOF
2018/07/26 10:07:26 http: TLS handshake error from 10.129.0.1:50812: EOF
2018/07/26 10:07:36 http: TLS handshake error from 10.129.0.1:50820: EOF
2018/07/26 10:07:46 http: TLS handshake error from 10.129.0.1:50828: EOF
2018/07/26 10:07:56 http: TLS handshake error from 10.129.0.1:50836: EOF
2018/07/26 10:08:06 http: TLS handshake error from 10.129.0.1:50844: EOF
2018/07/26 10:08:16 http: TLS handshake error from 10.129.0.1:50852: EOF
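The "Timed out after 300.000s. Expected : false to be true" failures above are the signature of a Gomega Eventually(...).Should(BeTrue()) assertion giving up at the end of its polling window. A minimal sketch of the pattern, assuming the 2018-era KubeVirt client API (kubecli.KubevirtClient, pkg/api/v1 types) and a Ginkgo suite with a registered fail handler; the helper name and the Status.Ready condition polled here are illustrative, not the exact body of tests/vm_test.go.

package tests_sketch

import (
	"time"

	. "github.com/onsi/gomega"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	v1 "kubevirt.io/kubevirt/pkg/api/v1"
	"kubevirt.io/kubevirt/pkg/kubecli"
)

// waitForVMReady polls the VirtualMachine until its Ready condition turns
// true; on timeout Gomega fails the spec with exactly the shape seen in
// this log: "Timed out after 300.000s. Expected : false to be true".
func waitForVMReady(virtClient kubecli.KubevirtClient, namespace, name string) *v1.VirtualMachine {
	var vm *v1.VirtualMachine
	Eventually(func() bool {
		var err error
		vm, err = virtClient.VirtualMachine(namespace).Get(name, &metav1.GetOptions{})
		Expect(err).ToNot(HaveOccurred())
		return vm.Status.Ready
	}, 300*time.Second, 1*time.Second).Should(BeTrue())
	return vm
}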
Pod name: virt-api-7d79764579-g7k67 Pod phase: Running level=info timestamp=2018-07-26T10:07:44.082627Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T10:07:44.086625Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T10:07:44.465288Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:07:44.467438Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:07:47 http: TLS handshake error from 10.129.0.1:57140: EOF level=info timestamp=2018-07-26T10:07:53.095002Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:07:53.349381Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:07:57 http: TLS handshake error from 10.129.0.1:57148: EOF level=info timestamp=2018-07-26T10:08:00.100396Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T10:08:03.145733Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:08:07 http: TLS handshake error from 10.129.0.1:57156: EOF level=info timestamp=2018-07-26T10:08:13.191611Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:08:14.541263Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:08:14.544252Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:08:17 http: TLS handshake error from 10.129.0.1:57164: EOF Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T10:08:21.030785Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:08:21.030891Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:08:21.031085Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= 
uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Setting stable UUID '8bd28cc0-d634-5bd0-9e7b-3c97f09b8096' (was '')"
level=info timestamp=2018-07-26T10:08:21.044999Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=d37c3cd8-90bb-11e8-8e98-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=info timestamp=2018-07-26T10:08:21.045092Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=d37c3cd8-90bb-11e8-8e98-525500d15501 msg="VirtualMachineInstance created because testvmi6s57b was added."
level=info timestamp=2018-07-26T10:08:21.045212Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=d37c3cd8-90bb-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:08:21.045339Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=d37c3cd8-90bb-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:08:21.078154Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:08:21.078213Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:08:21.093634Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:08:21.093723Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:08:21.140091Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:08:21.140232Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:08:21.164832Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:08:21.164967Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6s57b kind= uid=fd008243-90ba-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk"
level=info timestamp=2018-07-26T09:34:22.714014Z
pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
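The "Setting stable UUID ... (was '')" entry above reflects how the VM controller pins a deterministic firmware UUID: a name-based (version 5) UUID derived from the VM name, so the guest sees the same UUID across every restart, and the value is only "(was '')" on the first reconcile. A sketch of the derivation with github.com/google/uuid; the namespace UUID and library choice here are assumptions, and KubeVirt's actual inputs may differ.

package main

import (
	"fmt"

	"github.com/google/uuid"
)

func main() {
	// A version-5 UUID is a SHA-1 hash of (namespace, name): the same
	// inputs always yield the same UUID, hence "stable".
	// NameSpaceDNS is illustrative; not necessarily KubeVirt's namespace.
	fmt.Println(uuid.NewSHA1(uuid.NameSpaceDNS, []byte("testvmi6s57b")))
}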
Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:08:03.015319Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6s57b" level=info timestamp=2018-07-26T10:08:05.575640Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:08:05.575860Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:08:05.576019Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6s57b" level=info timestamp=2018-07-26T10:08:10.696363Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:08:10.696596Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:08:10.696700Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6s57b" level=info timestamp=2018-07-26T10:08:20.937053Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:08:20.952824Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:08:20.955671Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:08:20.955750Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:08:20.981594Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:08:20.981858Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind= uid=be39ed12-90bb-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:08:21.030417Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:08:21.030587Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6s57b kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmi6s57b-gzct9
Pod phase: Pending

• Failure [360.483 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should not update the VirtualMachineInstance spec if Running [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346

    Timed out after 360.000s.
    Expected
      : false
    to be true
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:353
------------------------------

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:11:16 http: TLS handshake error from 10.129.0.1:50998: EOF
2018/07/26 10:11:26 http: TLS handshake error from 10.129.0.1:51006: EOF
2018/07/26 10:11:36 http: TLS handshake error from 10.129.0.1:51014: EOF
2018/07/26 10:11:46 http: TLS handshake error from 10.129.0.1:51024: EOF
2018/07/26 10:11:56 http: TLS handshake error from 10.129.0.1:51032: EOF
2018/07/26 10:12:06 http: TLS handshake error from 10.129.0.1:51040: EOF
2018/07/26 10:12:16 http: TLS handshake error from 10.129.0.1:51048: EOF
2018/07/26 10:12:26 http: TLS handshake error from 10.129.0.1:51056: EOF
level=info timestamp=2018-07-26T10:12:30.082649Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:12:36 http: TLS handshake error from 10.129.0.1:51064: EOF
2018/07/26 10:12:46 http: TLS handshake error from 10.129.0.1:51072: EOF
2018/07/26 10:12:56 http: TLS handshake error from 10.129.0.1:51080: EOF
level=info timestamp=2018-07-26T10:13:00.083243Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:13:06 http: TLS handshake error from 10.129.0.1:51088: EOF
2018/07/26 10:13:16 http: TLS handshake error from 10.129.0.1:51096: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:12:44.626879Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:12:44.627487Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:12:44.629623Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:12:45.405218Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:12:45.406210Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:12:47 http: TLS handshake error from 10.129.0.1:57384: EOF
level=info timestamp=2018-07-26T10:12:54.000385Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:12:54.679107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
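The recurring "http: TLS handshake error from ...: EOF" lines in virt-api are what Go's HTTP server logs when a client opens a TCP connection to a TLS port and closes it without sending a ClientHello, which is the signature of a plain tcpSocket liveness/readiness probe. A self-contained reproduction (all names here are ours, not KubeVirt's):

package main

import (
	"fmt"
	"net"
	"net/http"
	"net/http/httptest"
	"time"
)

func main() {
	// A throwaway TLS server; httptest generates its own self-signed cert.
	ts := httptest.NewTLSServer(http.NotFoundHandler())
	defer ts.Close()

	// Open a raw TCP connection and close it without a TLS handshake,
	// the way a kubelet tcpSocket probe does. The server then logs
	// "http: TLS handshake error from 127.0.0.1:...: EOF" to stderr.
	conn, err := net.Dial("tcp", ts.Listener.Addr().String())
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	conn.Close()

	time.Sleep(100 * time.Millisecond) // give the server a moment to log
}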
2018/07/26 10:12:57 http: TLS handshake error from 10.129.0.1:57392: EOF
level=info timestamp=2018-07-26T10:13:04.728379Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:13:07 http: TLS handshake error from 10.129.0.1:57400: EOF
level=info timestamp=2018-07-26T10:13:14.770746Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:13:15.494818Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:13:15.495361Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:13:17 http: TLS handshake error from 10.129.0.1:57408: EOF

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:13:05.352348Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 msg="VirtualMachineInstance created because testvmipgp5x was added."
level=info timestamp=2018-07-26T10:13:05.367987Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:13:05.368101Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:13:05.368294Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:13:05.368331Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:13:05.380693Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:13:05.380774Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:13:05.399758Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:13:05.399852Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind=
uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:13:05.413983Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:13:05.414080Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:13:18.656945Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:13:18.657036Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:13:18.973273Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:13:18.973951Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipgp5x kind= uid=d3d83b3c-90bb-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:13:19.119101Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:13:19.119293Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:13:19.119393Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmipgp5x" level=info timestamp=2018-07-26T10:13:19.279865Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:13:19.280056Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T10:13:19.280148Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmipgp5x" level=info timestamp=2018-07-26T10:13:19.600444Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:13:19.600641Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:13:19.600732Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmipgp5x" level=info timestamp=2018-07-26T10:13:20.241090Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:13:20.241371Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:13:20.241458Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmipgp5x" level=info timestamp=2018-07-26T10:13:21.521888Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:13:21.522338Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmipgp5x kind= uid=7cf246a5-90bc-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:13:21.522630Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmipgp5x" Pod name: virt-launcher-testvmipgp5x-5h6bq Pod phase: Running goroutine 10 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4201fc430, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421912ca0, 0xc4200b6c20, 0xc4202ee4e0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc420096e40, 0xc42000e0e8, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc420089940, 0x114acc0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc420096e40, 0xc42000e0e8, 0x13, 0xc42191df18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42034c2c0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc42034c3c0, 0xc4200e66e0, 0xc4202f80d0, 0xc4202f8100, 0xc421915e00, 0xc4219280a0, 0x10f5e80, 0xc4200b6c20, 0x16, 0x10f5f00, ...) 
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure [300.478 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should survive guest shutdown, multiple times [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387

    Timed out after 300.000s.
    Expected
      : false
    to be true
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157
------------------------------
STEP: Creating new VMI, not running
STEP: Starting the VirtualMachineInstance
STEP: VMI has the running condition
VM testvmincdm6 was scheduled to start

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:17:26 http: TLS handshake error from 10.129.0.1:51298: EOF
level=info timestamp=2018-07-26T10:17:30.318117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:17:36 http: TLS handshake error from 10.129.0.1:51306: EOF
2018/07/26 10:17:46 http: TLS handshake error from 10.129.0.1:51316: EOF
2018/07/26 10:17:56 http: TLS handshake error from 10.129.0.1:51324: EOF
level=info timestamp=2018-07-26T10:18:00.217242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:18:06 http: TLS handshake error from 10.129.0.1:51332: EOF
2018/07/26 10:18:16 http: TLS handshake error from 10.129.0.1:51340: EOF
2018/07/26 10:18:26 http: TLS handshake error from 10.129.0.1:51348: EOF
2018/07/26 10:18:36 http: TLS handshake error from 10.129.0.1:51356: EOF
2018/07/26 10:18:46 http: TLS handshake error from 10.129.0.1:51364: EOF
2018/07/26 10:18:56 http: TLS handshake error from 10.129.0.1:51372: EOF
level=info timestamp=2018-07-26T10:19:00.086023Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:19:06 http: TLS handshake error from 10.129.0.1:51380: EOF
2018/07/26 10:19:16 http: TLS handshake error from 10.129.0.1:51388: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:18:36.423321Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:18:37 http: TLS handshake error from 10.129.0.1:57668: EOF
level=info timestamp=2018-07-26T10:18:46.485827Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:18:46.507825Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:18:46.514660Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:18:47 http: TLS handshake error from 10.129.0.1:57676: EOF
level=info timestamp=2018-07-26T10:18:54.644560Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:18:56.537364Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:18:57 http: TLS handshake error from 10.129.0.1:57684: EOF
level=info timestamp=2018-07-26T10:19:06.594511Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:19:07 http: TLS handshake error from 10.129.0.1:57692: EOF
level=info timestamp=2018-07-26T10:19:16.643018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:19:16.650013Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:19:16.663621Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:19:17 http: TLS handshake error from 10.129.0.1:57700: EOF

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:19:23.494399Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:19:23.494441Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:19:23.540939Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T10:19:23.541032Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true"
level=info timestamp=2018-07-26T10:19:23.541092Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Setting stable UUID '8f483cf8-caa1-5284-9af2-3dbd46dcbc45' (was '')"
level=info timestamp=2018-07-26T10:19:23.561034Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=5e5f25ca-90bd-11e8-8e98-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=info timestamp=2018-07-26T10:19:23.561158Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=5e5f25ca-90bd-11e8-8e98-525500d15501 msg="VirtualMachineInstance created because testvmincdm6 was added."
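The virt-launcher backtrace above runs through reflect.Value.Call and net/rpc.(*service).call: virt-handler drives virt-launcher over Go's net/rpc, whose server locates exported methods like Launcher.Sync by reflection and dispatches each call on its own goroutine, which is why a crash there takes the whole process down with exit code 2. A minimal sketch of that plumbing; the Args/Reply types and the TCP transport are placeholders (the real cmd-server in pkg/virt-launcher/virtwrap/cmd-server listens on a unix socket with its own request types).

package main

import (
	"fmt"
	"net"
	"net/rpc"
)

// Args and Reply stand in for the VMI sync request/response exchanged
// between virt-handler and virt-launcher's cmd-server.
type Args struct{ VMIName string }
type Reply struct{ Success bool }

type Launcher struct{}

// Sync has the exported two-argument, error-returning shape net/rpc
// requires; the server finds and invokes it via reflection, hence the
// reflect.Value.Call frames in the backtrace.
func (l *Launcher) Sync(args *Args, reply *Reply) error {
	reply.Success = true
	return nil
}

func main() {
	srv := rpc.NewServer()
	if err := srv.Register(&Launcher{}); err != nil {
		panic(err)
	}
	ln, err := net.Listen("tcp", "127.0.0.1:0") // real server: unix socket
	if err != nil {
		panic(err)
	}
	go srv.Accept(ln)

	client, err := rpc.Dial("tcp", ln.Addr().String())
	if err != nil {
		panic(err)
	}
	var reply Reply
	if err := client.Call("Launcher.Sync", &Args{VMIName: "testvmi7j2sv"}, &reply); err != nil {
		panic(err)
	}
	fmt.Println("sync ok:", reply.Success)
}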
level=info timestamp=2018-07-26T10:19:23.561227Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=5e5f25ca-90bd-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:19:23.561269Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=5e5f25ca-90bd-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:19:23.581329Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:19:23.581433Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:19:23.581869Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:19:23.581933Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:19:23.607300Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:19:23.607405Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmincdm6 kind= uid=86f0eaf6-90bc-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:19:03.644563Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmincdm6" level=info timestamp=2018-07-26T10:19:06.204880Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:19:06.205173Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:19:06.205276Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmincdm6" level=info timestamp=2018-07-26T10:19:11.325690Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:19:11.325942Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T10:19:11.326069Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmincdm6"
level=info timestamp=2018-07-26T10:19:21.566563Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:19:23.098842Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:19:23.099127Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:19:23.099186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:19:23.490744Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:19:23.491048Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind= uid=4814c900-90bd-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:19:23.542401Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:19:23.542549Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmincdm6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmincdm6-xdtwn
Pod phase: Pending

• Failure [364.646 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    Using virtctl interface
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435
      should start a VirtualMachineInstance once [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436

      Timed out after 360.920s.
      Expected
        : false
      to be true
      /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:453
------------------------------
STEP: getting a VMI
STEP: Invoking virtctl start
STEP: Getting the status of the VMI

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:23:26 http: TLS handshake error from 10.129.0.1:51590: EOF
2018/07/26 10:23:36 http: TLS handshake error from 10.129.0.1:51598: EOF
2018/07/26 10:23:46 http: TLS handshake error from 10.129.0.1:51608: EOF
2018/07/26 10:23:56 http: TLS handshake error from 10.129.0.1:51616: EOF
2018/07/26 10:24:06 http: TLS handshake error from 10.129.0.1:51624: EOF
2018/07/26 10:24:16 http: TLS handshake error from 10.129.0.1:51632: EOF
2018/07/26 10:24:26 http: TLS handshake error from 10.129.0.1:51640: EOF
level=info timestamp=2018-07-26T10:24:28.901452Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:24:28.944290Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:24:36 http: TLS handshake error from 10.129.0.1:51648: EOF
2018/07/26 10:24:46 http: TLS handshake error from 10.129.0.1:51656: EOF
2018/07/26 10:24:56 http: TLS handshake error from 10.129.0.1:51664: EOF
2018/07/26 10:25:06 http: TLS handshake error from 10.129.0.1:51672: EOF
2018/07/26 10:25:16 http: TLS handshake error from 10.129.0.1:51680: EOF
2018/07/26 10:25:26 http: TLS handshake error from 10.129.0.1:51688: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:24:47.682287Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:24:47.688373Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:24:47 http: TLS handshake error from 10.129.0.1:57968: EOF
level=info timestamp=2018-07-26T10:24:48.386444Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:24:55.341016Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:24:57 http: TLS handshake error from 10.129.0.1:57976: EOF
level=info timestamp=2018-07-26T10:24:58.429376Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:25:00.113286Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:25:07 http: TLS handshake error from 10.129.0.1:57984: EOF
level=info timestamp=2018-07-26T10:25:08.479050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:25:17.783408Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:25:17.786512Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:25:17 http: TLS handshake error from 10.129.0.1:57992: EOF level=info timestamp=2018-07-26T10:25:18.529446Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:25:25.394610Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T10:25:19.729510Z pos=vm.go:470 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=32ab0c8c-90be-11e8-8e98-525500d15501 msg="VirtualMachineInstance created bacause testvmiclhxc was added." level=info timestamp=2018-07-26T10:25:19.729569Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=32ab0c8c-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:19.729676Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=32ab0c8c-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:19.750149Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:25:19.750234Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:25:19.750437Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:25:19.750463Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:25:19.768525Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:25:19.768649Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:25:19.788510Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= 
uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:25:19.788599Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:25:19.792950Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:19.809577Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:25:19.809672Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:25:19.811999Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T09:34:20.153425Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:22.714014Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:22.714354Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T09:34:22.714595Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:27.835034Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T09:34:27.835243Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
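Note: the repeated "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ... the object has been modified; please apply your changes to the latest version and try again" lines in the virt-controller log above are ordinary Kubernetes optimistic-concurrency conflicts: a write carried a stale resourceVersion, the API server rejected it with 409 Conflict, and the controller re-queues the key rather than treating it as a hard error. A minimal sketch of the standard remedy with client-go's retry helper (the function, its parameters, and the Deployment target are hypothetical, not KubeVirt code):

package example

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/util/retry"
)

// scaleDeployment re-reads the object on every attempt so each Update carries
// a fresh resourceVersion; RetryOnConflict retries only on 409 Conflict.
func scaleDeployment(ctx context.Context, cs *kubernetes.Clientset, ns, name string, replicas int32) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		dep, err := cs.AppsV1().Deployments(ns).Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		dep.Spec.Replicas = &replicas
		_, err = cs.AppsV1().Deployments(ns).Update(ctx, dep, metav1.UpdateOptions{})
		return err
	})
}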
level=info timestamp=2018-07-26T09:34:27.835341Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmir2jbk" level=info timestamp=2018-07-26T09:34:38.075655Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.100186Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:34:38.100905Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:25:01.704788Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:04.265147Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:04.265360Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:04.265482Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:09.386050Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:09.386297Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
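Note: the "re-enqueuing VirtualMachineInstance ..." lines after each failed sync are the usual controller work-queue pattern: the key goes back on a rate-limited queue and is retried with backoff until a sync succeeds. A sketch of that loop with client-go's workqueue (illustrative shape only; syncVMI is a hypothetical stand-in for virt-handler's sync function):

package example

import (
	"log"

	"k8s.io/client-go/util/workqueue"
)

// processNextItem pops one key, runs the sync, and either re-enqueues the key
// with exponential backoff (on error) or clears its backoff history (on success).
func processNextItem(queue workqueue.RateLimitingInterface, syncVMI func(key string) error) bool {
	key, quit := queue.Get()
	if quit {
		return false
	}
	defer queue.Done(key)

	if err := syncVMI(key.(string)); err != nil {
		log.Printf("re-enqueuing VirtualMachineInstance %v: %v", key, err)
		queue.AddRateLimited(key)
		return true
	}
	queue.Forget(key)
	return true
}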
level=info timestamp=2018-07-26T10:25:09.386436Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:19.626990Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:25:19.641887Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:25:19.643310Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:25:19.643476Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:25:19.661387Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:25:19.661634Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind= uid=1cec268d-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:25:19.713763Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:25:19.714051Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiclhxc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiclhxc-5vkkc Pod phase: Running level=info timestamp=2018-07-26T10:25:22.928960Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-26T10:25:22.929248Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-26T10:25:22.931729Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" • Failure [361.104 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should stop a VirtualMachineInstance once [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467 Timed out after 360.000s. 
Expected
    <bool>: false
to be true
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:480
------------------------------
STEP: getting a VMI
STEP: Invoking virtctl stop
STEP: Ensuring VMI is running
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:23:56 http: TLS handshake error from 10.129.0.1:51616: EOF
2018/07/26 10:24:06 http: TLS handshake error from 10.129.0.1:51624: EOF
2018/07/26 10:24:16 http: TLS handshake error from 10.129.0.1:51632: EOF
2018/07/26 10:24:26 http: TLS handshake error from 10.129.0.1:51640: EOF
level=info timestamp=2018-07-26T10:24:28.901452Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:24:28.944290Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:24:36 http: TLS handshake error from 10.129.0.1:51648: EOF
2018/07/26 10:24:46 http: TLS handshake error from 10.129.0.1:51656: EOF
2018/07/26 10:24:56 http: TLS handshake error from 10.129.0.1:51664: EOF
2018/07/26 10:25:06 http: TLS handshake error from 10.129.0.1:51672: EOF
2018/07/26 10:25:16 http: TLS handshake error from 10.129.0.1:51680: EOF
2018/07/26 10:25:26 http: TLS handshake error from 10.129.0.1:51688: EOF
level=info timestamp=2018-07-26T10:25:30.179319Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:25:36 http: TLS handshake error from 10.129.0.1:51696: EOF
2018/07/26 10:25:46 http: TLS handshake error from 10.129.0.1:51704: EOF
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:25:18.529446Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:25:25.394610Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:25:27 http: TLS handshake error from 10.129.0.1:58000: EOF
level=info timestamp=2018-07-26T10:25:28.574611Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:25:37 http: TLS handshake error from 10.129.0.1:58008: EOF
level=info timestamp=2018-07-26T10:25:38.788639Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:25:43.350941Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:25:43.388167Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:25:47.748472Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:25:47.873412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:25:47.873792Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:25:47 http: TLS handshake error from 10.129.0.1:58016: EOF level=info timestamp=2018-07-26T10:25:48.087774Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:25:48.118365Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:25:48.911757Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T10:25:19.792950Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:19.809577Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:25:19.809672Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:25:19.811999Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:27.712552Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiclhxc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 32ab0c8c-90be-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:28.713229Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.714888Z pos=preset.go:165 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.728227Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.728804Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.746220Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.746871Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.829883Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.831103Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.832126Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil7rfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" level=info timestamp=2018-07-26T10:25:29.027414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7xdxj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7xdxj" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T09:34:38.101131Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T09:37:02.307694Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.308283Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind= uid=07dffb2c-90b7-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
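Note: both "Timed out after 360...s" failures above carry the signature of a Gomega Eventually that polls a boolean condition and never sees it turn true. A sketch of the shape of such an assertion (vmiIsRunning is a hypothetical poll of the VMI's status; this is not the actual vm_test.go code):

package tests

import (
	"testing"
	"time"

	. "github.com/onsi/gomega"
)

// vmiIsRunning stands in for a poll of the VMI's status via the client.
func vmiIsRunning(namespace, name string) bool { return false }

// A condition that stays false fails exactly like the log:
// "Timed out after 360.000s. Expected <bool>: false to be true".
func TestVMIStart(t *testing.T) {
	RegisterTestingT(t)
	Eventually(func() bool {
		return vmiIsRunning("kubevirt-test-default", "testvmincdm6")
	}, 360*time.Second, 1*time.Second).Should(BeTrue())
}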
level=info timestamp=2018-07-26T09:37:02.338335Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T09:37:02.338475Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmir2jbk kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:25:49.673535Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:50.754534Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="unexpected EOF" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:50.768938Z pos=vm.go:251 component=virt-handler reason="unexpected EOF" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:25:50.770592Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:50.770805Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:50.770900Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:25:50.776572Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:50.776703Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:50.776863Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:25:50.800592Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:50.801115Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T10:25:50.801252Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:25:49.908621Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:49.908761Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:49.908844Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" level=info timestamp=2018-07-26T10:25:49.921458Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:49.921654Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:49.921828Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" level=info timestamp=2018-07-26T10:25:49.962296Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:49.962495Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:49.962583Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" level=info timestamp=2018-07-26T10:25:50.061087Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:50.061543Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:50.061662Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" level=info timestamp=2018-07-26T10:25:50.222041Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:50.222716Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
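Note: the "http: TLS handshake error from 10.129.0.1:...: EOF" lines that recur every ten seconds in both virt-api pods are what Go's net/http server logs when a client opens a TCP connection to the TLS port and closes it before sending a ClientHello; TCP-level liveness/readiness probes behave exactly like this, so those lines are background noise rather than part of the failures. A small self-contained reproduction (the cert/key paths are hypothetical and assumed to exist):

package main

import (
	"log"
	"net"
	"net/http"
	"time"
)

func main() {
	srv := &http.Server{Addr: "127.0.0.1:8443"}
	go func() {
		// Assumes a self-signed cert/key pair exists at these paths.
		log.Println(srv.ListenAndServeTLS("cert.pem", "key.pem"))
	}()
	time.Sleep(200 * time.Millisecond)

	// Behave like a TCP probe: connect, then hang up without a TLS handshake.
	conn, err := net.Dial("tcp", "127.0.0.1:8443")
	if err != nil {
		log.Fatal(err)
	}
	conn.Close()
	time.Sleep(200 * time.Millisecond) // server logs "http: TLS handshake error ...: EOF"
}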
level=info timestamp=2018-07-26T10:25:50.222992Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" Pod name: virt-launcher-testvmi7xdxj-bhd26 Pod phase: Running level=info timestamp=2018-07-26T10:25:35.727523Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-26T10:25:35.727790Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-26T10:25:35.742540Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" Pod name: virt-launcher-testvmi9fwm4-8dggh Pod phase: Running [signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516] goroutine 12 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202020b0, 0xc4201a6c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc420bc8040, 0xc42000eb20, 0xc42024fba0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc42008c0c0, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200ba280, 0x202c736d65747379, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc42008c0c0, 0xc42000e070, 0x13, 0xc420beaf18, 0x3, 0x3, 0x22726568744f2220, 0x20202020200a202c, 0x2020202020202020) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc42040a040, 0xc4203fa5f0, 0xc4208eb670, 0xc4208eb680, 0xc420be4300, 0xc420bb99e0, 0x10f5e80, 0xc42000eb20, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a Pod name: virt-launcher-testvmil7rfq-lq9pz Pod phase: Running goroutine 33 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027d260, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42191ed20, 0xc4206e0ea8, 0xc4203ffbc0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc4200c6f60, 0xc4200b6318, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc4200bbac0, 0xc42017e420, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc4200c6f60, 0xc4200b6318, 0x13, 0xc421950f18, 0x3, 0x3, 0xc42017e560, 0xc42017e580, 0xc42017e5c0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc4204068c0, 0xc4200b8960, 0xc421aa2920, 0xc421aa2930, 0xc421921d80, 0xc42192c0a0, 0x10f5e80, 0xc4206e0ea8, 0x16, 0x10f5f00, ...) 
/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 Pod name: virt-launcher-testvmixb2fz-drt8s Pod phase: Running level=info timestamp=2018-07-26T10:25:36.168480Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-26T10:25:36.168767Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-26T10:25:36.170510Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-26T10:25:49.780514Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-26T10:25:49.861234Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmixb2fz" level=info timestamp=2018-07-26T10:25:49.865351Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-26T10:25:49.865562Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-9tfn5 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-wqlgh Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-85l9v Pod phase: Running 2018/07/26 10:26:16 http: TLS handshake error from 10.129.0.1:51728: EOF 2018/07/26 10:26:26 http: TLS handshake error from 10.129.0.1:51736: EOF 2018/07/26 10:26:36 http: TLS handshake error from 10.129.0.1:51744: EOF 2018/07/26 10:26:46 http: TLS handshake error from 10.129.0.1:51752: EOF 2018/07/26 10:26:56 http: TLS handshake error from 10.129.0.1:51762: EOF 2018/07/26 10:27:06 http: TLS handshake error from 10.129.0.1:51770: EOF 2018/07/26 10:27:16 http: TLS handshake error from 10.129.0.1:51778: EOF 2018/07/26 10:27:26 http: TLS handshake error from 10.129.0.1:51786: EOF 2018/07/26 10:27:36 http: TLS handshake error from 10.129.0.1:51794: EOF 2018/07/26 10:27:46 http: TLS handshake error from 10.129.0.1:51802: EOF 2018/07/26 10:27:56 http: TLS handshake error from 10.129.0.1:51810: EOF level=info timestamp=2018-07-26T10:28:00.120788Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 10:28:06 http: TLS handshake error from 10.129.0.1:51818: EOF 2018/07/26 10:28:16 http: TLS handshake error from 10.129.0.1:51826: EOF 2018/07/26 10:28:26 http: TLS handshake error from 10.129.0.1:51834: EOF Pod name: virt-api-7d79764579-g7k67 Pod phase: Running 2018/07/26 10:27:47 http: TLS handshake error from 10.129.0.1:58114: EOF level=info timestamp=2018-07-26T10:27:48.322545Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:27:48.324706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:27:49.641301Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:27:55.712361Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:27:57 http: TLS handshake error from 10.129.0.1:58122: EOF level=info timestamp=2018-07-26T10:27:59.678973Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:28:07 http: TLS handshake error from 10.129.0.1:58130: EOF level=info timestamp=2018-07-26T10:28:09.715820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:28:17 http: TLS handshake error from 10.129.0.1:58138: EOF level=info timestamp=2018-07-26T10:28:18.430960Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:18.431648Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:19.762222Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:25.767607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:28:27 http: TLS handshake error from 10.129.0.1:58146: EOF Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T10:25:19.792950Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:19.809577Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T10:25:19.809672Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiclhxc kind= uid=60b05311-90bd-11e8-8e98-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-26T10:25:19.811999Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:27.712552Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiclhxc\": StorageError: 
invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiclhxc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 32ab0c8c-90be-11e8-8e98-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiclhxc" level=info timestamp=2018-07-26T10:25:28.713229Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.714888Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.728227Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.728804Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.746220Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.746871Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.829883Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.831103Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.832126Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil7rfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" level=info timestamp=2018-07-26T10:25:29.027414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7xdxj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7xdxj" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=error timestamp=2018-07-26T10:25:52.052884Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" 
msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:52.052986Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:25:53.333355Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:53.333602Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:26:02.594088Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:26:02.594351Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:26:02.594466Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7xdxj" level=info timestamp=2018-07-26T10:26:10.389167Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:10.399537Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:10.400508Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:10.400579Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:11.519253Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.528605Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:11.528830Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.528895Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:12.834874Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:12.853641Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:12.853891Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:12.853979Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
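Note: the three Failed virt-launcher pods below all died the same way: a SIGSEGV (nil-pointer dereference, addr=0x0) inside LibvirtDomainManager.SyncVMI at virtwrap/manager.go:163, reached through the net/rpc-served Sync call. net/rpc does not recover panics in handler goroutines, so the panic takes the whole process down, which is why each dump ends with "virt-launcher exited with code 2". A minimal hypothetical reproduction of the pattern (not the actual manager.go code):

package main

import "fmt"

type domain struct{ spec *string }

// manager stands in for LibvirtDomainManager; dom was never initialized.
type manager struct{ dom *domain }

// sync dereferences a nil field, the classic source of
// "signal SIGSEGV: segmentation violation ... addr=0x0".
func (m *manager) sync() string {
	return *m.dom.spec // m.dom is nil: the runtime panics here
}

func main() {
	m := &manager{}
	fmt.Println(m.sync()) // unrecovered panic: process exits with status 2
}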
Pod name: virt-launcher-testvmi7xdxj-bhd26 Pod phase: Failed goroutine 27 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4201cc0c0, 0xc42018ac80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc420c12040, 0xc42000ea20, 0xc420c4ddc0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc4200ce120, 0xc4200d4070, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc420088540, 0x114acc0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc4200ce120, 0xc4200d4070, 0x13, 0xc420c2c718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42068ea40) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc42031c040, 0xc4200d22d0, 0xc420c112c0, 0xc420c112d0, 0xc420c26300, 0xc420c087e0, 0x10f5e80, 0xc42000ea20, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 Pod name: virt-launcher-testvmi9fwm4-8dggh Pod phase: Failed goroutine 12 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202020b0, 0xc4201a6c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc420bc8040, 0xc42000eb20, 0xc42024fba0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc42008c0c0, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200ba280, 0x202c736d65747379, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc42008c0c0, 0xc42000e070, 0x13, 0xc420beaf18, 0x3, 0x3, 0x22726568744f2220, 0x20202020200a202c, 0x2020202020202020) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc42040a040, 0xc4203fa5f0, 0xc4208eb670, 0xc4208eb680, 0xc420be4300, 0xc420bb99e0, 0x10f5e80, 0xc42000eb20, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 Pod name: virt-launcher-testvmil7rfq-lq9pz Pod phase: Failed goroutine 33 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027d260, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42191ed20, 0xc4206e0ea8, 0xc4203ffbc0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc4200c6f60, 0xc4200b6318, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc4200bbac0, 0xc42017e420, ...) 
/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc4200c6f60, 0xc4200b6318, 0x13, 0xc421950f18, 0x3, 0x3, 0xc42017e560, 0xc42017e580, 0xc42017e5c0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc4204068c0, 0xc4200b8960, 0xc421aa2920, 0xc421aa2930, 0xc421921d80, 0xc42192c0a0, 0x10f5e80, 0xc4206e0ea8, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 Pod name: virt-launcher-testvmixb2fz-drt8s Pod phase: Failed goroutine 16 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4201fc130, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42191e040, 0xc42000fac8, 0xc420400d20, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc42008c1e0, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc420088440, 0x114acc0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc42008c1e0, 0xc42000e078, 0x13, 0xc42005df18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42034c300) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc4200ba2c0, 0xc4200e6190, 0xc421a0fc30, 0xc421a0fc40, 0xc42194a300, 0xc421908d40, 0x10f5e80, 0xc42000fac8, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 • Failure in Spec Setup (BeforeEach) [181.977 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Unexpected Warning event received. 
Expected
    <string>: Warning
not to equal
    <string>: Warning
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T10:25:29.326252Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmil7rfq-lq9pz"
level=info timestamp=2018-07-26T10:25:49.490421Z pos=utils.go:243 component=tests msg="Pod ownership transferred to the node virt-launcher-testvmil7rfq-lq9pz"
level=error timestamp=2018-07-26T10:25:50.147376Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:26:46 http: TLS handshake error from 10.129.0.1:51752: EOF
2018/07/26 10:26:56 http: TLS handshake error from 10.129.0.1:51762: EOF
2018/07/26 10:27:06 http: TLS handshake error from 10.129.0.1:51770: EOF
2018/07/26 10:27:16 http: TLS handshake error from 10.129.0.1:51778: EOF
2018/07/26 10:27:26 http: TLS handshake error from 10.129.0.1:51786: EOF
2018/07/26 10:27:36 http: TLS handshake error from 10.129.0.1:51794: EOF
2018/07/26 10:27:46 http: TLS handshake error from 10.129.0.1:51802: EOF
2018/07/26 10:27:56 http: TLS handshake error from 10.129.0.1:51810: EOF
level=info timestamp=2018-07-26T10:28:00.120788Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:28:06 http: TLS handshake error from 10.129.0.1:51818: EOF
2018/07/26 10:28:16 http: TLS handshake error from 10.129.0.1:51826: EOF
2018/07/26 10:28:26 http: TLS handshake error from 10.129.0.1:51834: EOF
level=info timestamp=2018-07-26T10:28:30.074637Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:28:36 http: TLS handshake error from 10.129.0.1:51842: EOF
2018/07/26 10:28:46 http: TLS handshake error from 10.129.0.1:51850: EOF
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:28:18.430960Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:28:18.431648Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:28:19.762222Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:28:25.767607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:28:27 http: TLS handshake error from 10.129.0.1:58146: EOF
level=info timestamp=2018-07-26T10:28:29.850958Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:28:37 http: TLS handshake error from 10.129.0.1:58154: EOF
level=info
timestamp=2018-07-26T10:28:40.304799Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:28:48 http: TLS handshake error from 10.129.0.1:58162: EOF level=info timestamp=2018-07-26T10:28:48.640317Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:48.640715Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:48.819963Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:49.166790Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:49.470305Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:28:50.369327Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T10:25:28.829883Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:25:28.831103Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:25:28.832126Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil7rfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq" level=info timestamp=2018-07-26T10:25:29.027414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7xdxj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7xdxj" level=info timestamp=2018-07-26T10:28:30.254072Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:28:30.254260Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-07-26T10:28:30.266351Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:28:30.266517Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:28:30.281100Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:28:30.281192Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:28:30.295296Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:28:30.295379Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:28:30.892407Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5" level=info timestamp=2018-07-26T10:28:31.098244Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirv2wv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirv2wv" level=info timestamp=2018-07-26T10:28:31.507281Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
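The "Operation cannot be fulfilled ... the object has been modified" messages above are the API server's optimistic-concurrency check: the controller submitted an update against a stale resourceVersion and must re-read before retrying, which is what the "reenqueuing VirtualMachineInstance" lines record. A generic client-go sketch of the usual retry idiom follows (current client-go signatures with a context argument; the 2018-era client used in this run predates that, and this is not KubeVirt's controller code):

package main

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/util/retry"
)

// touchPod retries the read-modify-write whenever the apiserver answers with
// a 409 Conflict ("the object has been modified"), re-reading the latest
// version on each attempt, which is what the reenqueue above amounts to.
func touchPod(cs kubernetes.Interface, ns, name string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		pod, err := cs.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		if pod.Labels == nil {
			pod.Labels = map[string]string{}
		}
		pod.Labels["touched"] = "true"
		_, err = cs.CoreV1().Pods(ns).Update(context.TODO(), pod, metav1.UpdateOptions{})
		return err // a Conflict error here triggers another attempt
	})
}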
Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:28:29.653973Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:29.654308Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind= uid=38080285-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:29.697823Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:29.698169Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind= uid=3786cec4-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:29.731711Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:29.731853Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7xdxj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:29.744432Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:29.744822Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind= uid=380a8361-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:29.770127Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:29.770249Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil7rfq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:29.825856Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:29.826025Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixb2fz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:50.809416Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:28:51.528444Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 reason="unexpected EOF" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:28:51.785717Z pos=vm.go:251 component=virt-handler reason="unexpected EOF" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiqn995"

Pod name: virt-launcher-testvmi8kkf5-m7x7h
Pod phase: Running
level=info timestamp=2018-07-26T10:28:42.087362Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T10:28:42.087619Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T10:28:42.096080Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"

Pod name: virt-launcher-testvmilxzg5-v5kng
Pod phase: Running
level=info timestamp=2018-07-26T10:28:38.348676Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T10:28:38.348959Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T10:28:38.351747Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"

Pod name: virt-launcher-testvmiqn995-lg6m7
Pod phase: Running
goroutine 33 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027a180, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421930040, 0xc42000f420, 0xc42029cbc0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096600, 0xc42000e088, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200bb580, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096600, 0xc42000e088, 0x13, 0xc421957718, 0x3, 0x3, 0x5cc549, 0xc4202df050, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202ace00, 0xc4200e62d0, 0xc42192f150, 0xc42192f160, 0xc42194e300, 0xc421922d00, 0x10f5e80, 0xc42000f420, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmirv2wv-74lmv
Pod phase: Running
level=info timestamp=2018-07-26T10:28:36.860679Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T10:28:36.860938Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T10:28:36.862720Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:29:36 http: TLS handshake error from 10.129.0.1:51890: EOF
2018/07/26 10:29:46 http: TLS handshake error from 10.129.0.1:51898: EOF
2018/07/26 10:29:56 http: TLS handshake error from 10.129.0.1:51908: EOF
2018/07/26 10:30:06 http: TLS handshake error from 10.129.0.1:51916: EOF
2018/07/26 10:30:16 http: TLS handshake error from 10.129.0.1:51924: EOF
2018/07/26 10:30:26 http: TLS handshake error from 10.129.0.1:51932: EOF
level=info timestamp=2018-07-26T10:30:30.156050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:30:36 http: TLS handshake error from 10.129.0.1:51940: EOF
2018/07/26 10:30:46 http: TLS handshake error from 10.129.0.1:51948: EOF
2018/07/26 10:30:56 http: TLS handshake error from 10.129.0.1:51956: EOF
level=info timestamp=2018-07-26T10:31:00.146267Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:31:06 http: TLS handshake error from 10.129.0.1:51964: EOF
2018/07/26 10:31:16 http: TLS handshake error from 10.129.0.1:51972: EOF
2018/07/26 10:31:26 http: TLS handshake error from 10.129.0.1:51980: EOF
level=info timestamp=2018-07-26T10:31:30.107372Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
2018/07/26 10:30:47 http: TLS handshake error from 10.129.0.1:58260: EOF
level=info timestamp=2018-07-26T10:30:49.008127Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:30:49.012898Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:30:50.943096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:30:56.168149Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:30:57 http: TLS handshake error from 10.129.0.1:58268: EOF
level=info timestamp=2018-07-26T10:31:00.992199Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:31:07 http: TLS handshake error from 10.129.0.1:58276: EOF
level=info timestamp=2018-07-26T10:31:11.041887Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:31:17 http: TLS handshake error from 10.129.0.1:58284: EOF
level=info timestamp=2018-07-26T10:31:19.082431Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:19.099655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:21.100359Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:26.226596Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:31:27 http: TLS handshake error from 10.129.0.1:58292: EOF
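Both virt-api replicas log "http: TLS handshake error from 10.129.0.1:<port>: EOF" roughly every ten seconds with a steadily rising source port. That exact message is what Go's net/http server prints when a peer opens a TCP connection to the TLS port and closes it without completing a handshake, so a TCP-level health probe is a plausible source; the TLS endpoint itself keeps serving 200s in between. A self-contained reproduction sketch, assuming a locally generated cert pair (tls.crt and tls.key are placeholder file names):

package main

import (
	"log"
	"net"
	"net/http"
	"time"
)

func main() {
	srv := &http.Server{
		Addr: "127.0.0.1:8443",
		Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			w.Write([]byte("ok\n"))
		}),
	}
	// Placeholder cert/key pair; generate one locally to run this.
	go func() { log.Println(srv.ListenAndServeTLS("tls.crt", "tls.key")) }()
	time.Sleep(200 * time.Millisecond)

	// Open a TCP connection to the TLS port and close it without
	// handshaking, as a TCP readiness probe would. The server's error log
	// then prints: http: TLS handshake error from 127.0.0.1:<port>: EOF
	conn, err := net.Dial("tcp", "127.0.0.1:8443")
	if err != nil {
		log.Fatal(err)
	}
	conn.Close()
	time.Sleep(200 * time.Millisecond)
}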
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:25:28.829883Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:25:28.831103Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:25:28.832126Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil7rfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil7rfq"
level=info timestamp=2018-07-26T10:25:29.027414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7xdxj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7xdxj"
level=info timestamp=2018-07-26T10:28:30.254072Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:28:30.254260Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.266351Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:28:30.266517Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.281100Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:28:30.281192Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.295296Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:28:30.295379Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.892407Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5"
level=info timestamp=2018-07-26T10:28:31.098244Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirv2wv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirv2wv"
level=info timestamp=2018-07-26T10:28:31.507281Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:29:14.217544Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:29:14.217849Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:29:14.217990Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqn995 kind= uid=a43b31d6-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:29:16.522653Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:29:16.532696Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:29:16.534882Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:29:16.535325Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:29:17.429160Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:29:17.442227Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:29:17.444504Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:29:17.444755Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilxzg5 kind= uid=a43caeb0-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:29:18.072548Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:29:18.082798Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:29:18.082890Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:29:18.082957Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
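The virt-handler excerpts above show its reconcile loop at work: each failed sync ("connection is shut down", "unexpected EOF") is followed by "re-enqueuing VirtualMachineInstance <namespace>/<name>", after which the key is retried until the launcher answers again and the loop reports "Synchronization loop succeeded." A generic sketch of that pattern with client-go's rate-limited workqueue follows; this is the standard controller idiom, not vm.go's actual code:

package main

import (
	"log"

	"k8s.io/client-go/util/workqueue"
)

// sync is a stand-in for the per-VMI synchronization that fails above with
// "connection is shut down" while virt-launcher is gone.
func sync(key string) error { return nil }

// processNextItem drains one key from the queue; on failure it re-enqueues
// with backoff, producing the "re-enqueuing VirtualMachineInstance
// <namespace>/<name>" rhythm seen in the virt-handler logs.
func processNextItem(queue workqueue.RateLimitingInterface) bool {
	key, quit := queue.Get()
	if quit {
		return false
	}
	defer queue.Done(key)

	if err := sync(key.(string)); err != nil {
		log.Printf("re-enqueuing VirtualMachineInstance %s: %v", key, err)
		queue.AddRateLimited(key) // retry later, with growing backoff
		return true
	}
	queue.Forget(key) // reset the backoff counter after a clean sync
	return true
}

func main() {
	queue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
	queue.Add("kubevirt-test-default/testvmiqn995")
	for processNextItem(queue) {
		if queue.Len() == 0 {
			break
		}
	}
}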
Pod name: virt-launcher-testvmi8kkf5-m7x7h
Pod phase: Failed
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a65e0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42191e040, 0xc4200b7fe0, 0xc420403fc0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6480, 0xc4200b6098, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420088180, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6480, 0xc4200b6098, 0x13, 0xc420239f18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4215f9280)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420406040, 0xc4200b8500, 0xc421a30bc0, 0xc421a30bd0, 0xc421938300, 0xc421910d20, 0x10f5e80, 0xc4200b7fe0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmilxzg5-v5kng
Pod phase: Failed
goroutine 14 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a62a0, 0xc4201b0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421930040, 0xc4200b6330, 0xc42019bfc0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc4200b60a8, 0x13, 0x131ed4d, 0x4, 0xc42193ef18, 0x3, 0x3, 0xc4219d7780, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc4200b60a8, 0x13, 0xc421954f18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202a6040, 0xc4200b8500, 0xc4202ea0f0, 0xc4202ea100, 0xc421950300, 0xc421924d20, 0x10f5e80, 0xc4200b6330, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmiqn995-lg6m7
Pod phase: Failed
goroutine 33 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027a180, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421930040, 0xc42000f420, 0xc42029cbc0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096600, 0xc42000e088, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200bb580, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096600, 0xc42000e088, 0x13, 0xc421957718, 0x3, 0x3, 0x5cc549, 0xc4202df050, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202ace00, 0xc4200e62d0, 0xc42192f150, 0xc42192f160, 0xc42194e300, 0xc421922d00, 0x10f5e80, 0xc42000f420, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmirv2wv-74lmv
Pod phase: Failed
goroutine 30 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42029c0b0, 0xc4201bac80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42193a040, 0xc42000f710, 0xc420265d20, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c180, 0xc42000e0b0, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc4200bb940, 0xc420193780, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c180, 0xc42000e0b0, 0x13, 0xc420257718, 0x3, 0x3, 0xc4201938c0, 0xc4201938e0, 0xc420193900)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4200ba180, 0xc4200d64b0, 0xc4207a4340, 0xc4207a4350, 0xc42194c300, 0xc42192ed40, 0x10f5e80, 0xc42000f710, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [181.520 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance with pod network connectivity explicitly set
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received.
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T10:28:31.007501Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmiqn995-lg6m7"
level=info timestamp=2018-07-26T10:28:51.227040Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmiqn995-lg6m7"
level=error timestamp=2018-07-26T10:28:52.202733Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
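Both test failures come from the same assertion at tests/utils.go:245: while waiting for the VMI to start, the test watches its events and fails on the first Warning, and the crashing launcher produces a Warning whose message is the "unexpected EOF" recorded in the tests log lines above. The actual implementation is not shown in this log; a Gomega sketch that would render precisely the failure text seen here:

package tests

import (
	. "github.com/onsi/gomega"

	k8sv1 "k8s.io/api/core/v1"
)

// failOnWarning reproduces the failure output above: when a Warning event
// arrives on the watch, the NotTo(Equal(...)) check fails with
// "Unexpected Warning event received." followed by
// "Expected : Warning not to equal : Warning".
func failOnWarning(event *k8sv1.Event) {
	ExpectWithOffset(1, event.Type).NotTo(
		Equal(k8sv1.EventTypeWarning), // EventTypeWarning == "Warning"
		"Unexpected Warning event received.",
	)
}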
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:30:06 http: TLS handshake error from 10.129.0.1:51916: EOF
2018/07/26 10:30:16 http: TLS handshake error from 10.129.0.1:51924: EOF
2018/07/26 10:30:26 http: TLS handshake error from 10.129.0.1:51932: EOF
level=info timestamp=2018-07-26T10:30:30.156050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:30:36 http: TLS handshake error from 10.129.0.1:51940: EOF
2018/07/26 10:30:46 http: TLS handshake error from 10.129.0.1:51948: EOF
2018/07/26 10:30:56 http: TLS handshake error from 10.129.0.1:51956: EOF
level=info timestamp=2018-07-26T10:31:00.146267Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:31:06 http: TLS handshake error from 10.129.0.1:51964: EOF
2018/07/26 10:31:16 http: TLS handshake error from 10.129.0.1:51972: EOF
2018/07/26 10:31:26 http: TLS handshake error from 10.129.0.1:51980: EOF
level=info timestamp=2018-07-26T10:31:30.107372Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:31:36 http: TLS handshake error from 10.129.0.1:51988: EOF
2018/07/26 10:31:46 http: TLS handshake error from 10.129.0.1:51996: EOF
2018/07/26 10:31:56 http: TLS handshake error from 10.129.0.1:52004: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:31:26.226596Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:31:27 http: TLS handshake error from 10.129.0.1:58292: EOF
level=info timestamp=2018-07-26T10:31:31.190550Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:31:38 http: TLS handshake error from 10.129.0.1:58300: EOF
level=info timestamp=2018-07-26T10:31:41.252356Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:31:47 http: TLS handshake error from 10.129.0.1:58308: EOF
level=info timestamp=2018-07-26T10:31:50.361561Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:50.371211Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:50.371405Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:50.628864Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:50.667450Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:51.385332Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:31:56.289589Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:31:57 http: TLS handshake error from 10.129.0.1:58316: EOF
level=info timestamp=2018-07-26T10:32:00.185107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:28:30.281192Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.295296Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:28:30.295379Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.892407Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5"
level=info timestamp=2018-07-26T10:28:31.098244Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirv2wv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirv2wv"
level=info timestamp=2018-07-26T10:28:31.507281Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5"
level=info timestamp=2018-07-26T10:31:31.598431Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.598600Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.611366Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.611465Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.618880Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.618971Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.644290Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.644488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.678646Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibkcbp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibkcbp"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=error timestamp=2018-07-26T10:32:00.184162Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:32:00.184273Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9jlrv"
level=info timestamp=2018-07-26T10:32:00.187256Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:32:00.187363Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:32:00.187441Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibkcbp"
level=info timestamp=2018-07-26T10:32:00.228179Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:32:00.228406Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:32:00.228496Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9jlrv"
level=info timestamp=2018-07-26T10:32:00.270314Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:32:00.270538Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:32:00.270663Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibkcbp"
level=info timestamp=2018-07-26T10:32:00.312788Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:32:00.313834Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:32:00.313963Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9jlrv"
level=error timestamp=2018-07-26T10:32:00.339208Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 reason="unexpected EOF" msg="Synchronizing the VirtualMachineInstance failed."
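The launcher side of these failures ends in a traceback; the handler side above shows the two errors a net/rpc client reports when its server dies. The call in flight when the process exits fails with "unexpected EOF", and every later call on the closed client returns rpc.ErrShutdown, whose message is the literal "connection is shut down" that virt-handler logs. A client-side sketch follows; the method name mirrors the Launcher.Sync seen in the tracebacks, the rest is illustrative:

package handler

import (
	"errors"
	"log"
	"net/rpc"
)

type SyncArgs struct{ VMI string }
type SyncReply struct{ OK bool }

// syncVMI issues the Sync RPC that virt-handler performs per update. If the
// launcher process died mid-call, the pending call fails with "unexpected
// EOF"; afterwards the client is closed and every retry returns
// rpc.ErrShutdown, i.e. "connection is shut down", matching the logs above.
func syncVMI(client *rpc.Client, vmi string) error {
	var reply SyncReply
	err := client.Call("Launcher.Sync", &SyncArgs{VMI: vmi}, &reply)
	if errors.Is(err, rpc.ErrShutdown) {
		log.Printf("re-enqueuing VirtualMachineInstance %s: %v", vmi, err)
	}
	return err
}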
Pod name: virt-launcher-testvmi9jlrv-vfkrb
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 16 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42023e130, 0xc4201bcc80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421918040, 0xc4200b7c70, 0xc42029f080, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c060, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4219d7b40, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c060, 0xc42000e070, 0x13, 0xc421953f18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42169b300)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420353a40, 0xc4200ee1e0, 0xc4219a6e00, 0xc4219a6e10, 0xc421948300, 0xc42190cd20, 0x10f5e80, 0xc4200b7c70, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: virt-launcher-testvmibkcbp-2fzdv
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc420268170, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421924040, 0xc42000f280, 0xc42032b280, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c060, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc42029b980, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c060, 0xc42000e078, 0x13, 0xc421947f18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42048b280)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420334a80, 0xc4200d6230, 0xc421923510, 0xc421923520, 0xc421942300, 0xc421918d00, 0x10f5e80, 0xc42000f280, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: virt-launcher-testvmittlqj-4wqrg
Pod phase: Running
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a6290, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42192e040, 0xc4200b7d58, 0xc42037cc60, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6600, 0xc4200b60a0, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420088280, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6600, 0xc4200b60a0, 0x13, 0xc42028bf18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4208393c0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420334080, 0xc4200b8460, 0xc421aac9a0, 0xc421aac9b0, 0xc421948300, 0xc421922d40, 0x10f5e80, 0xc4200b7d58, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmizl6jz-cxvzm
Pod phase: Running
goroutine 33 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202040b0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421934040, 0xc42000ebf0, 0xc420251f60, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc4200b60a0, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4203545c0, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc4200b60a0, 0x13, 0xc42005df18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420354040, 0xc4200b85f0, 0xc4219dad30, 0xc4219dad40, 0xc421946300, 0xc421926d20, 0x10f5e80, 0xc42000ebf0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
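Editor's note: every trace above shows the same fault: "[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 ...]" raised from LibvirtDomainManager.SyncVMI at manager.go:163. The addr=0x0 is the giveaway for a nil-pointer dereference. A minimal Go reproduction of that failure shape, with hypothetical types (this is not the actual manager.go code):

package main

// connection stands in for whatever manager.go:163 dereferences; it is left
// nil here on purpose, mimicking a lost libvirt connection.
type connection struct{ alive bool }

func (c *connection) IsAlive() bool {
	// Reading c.alive with c == nil loads memory at offset 0 from a nil base,
	// which the runtime reports as:
	//   panic: runtime error: invalid memory address or nil pointer dereference
	//   [signal SIGSEGV: segmentation violation code=0x1 addr=0x0 ...]
	return c.alive
}

type domainManager struct {
	conn *connection // never initialized in this sketch
}

func (m *domainManager) SyncVMI() bool {
	return m.conn.IsAlive()
}

func main() {
	m := &domainManager{}
	m.SyncVMI() // panics; a Go panic terminates the process with exit code 2,
	// matching the repeated "virt-launcher exited with code 2" lines.
}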
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:32:36 http: TLS handshake error from 10.129.0.1:52036: EOF
2018/07/26 10:32:46 http: TLS handshake error from 10.129.0.1:52044: EOF
2018/07/26 10:32:56 http: TLS handshake error from 10.129.0.1:52054: EOF
2018/07/26 10:33:06 http: TLS handshake error from 10.129.0.1:52062: EOF
2018/07/26 10:33:16 http: TLS handshake error from 10.129.0.1:52070: EOF
2018/07/26 10:33:26 http: TLS handshake error from 10.129.0.1:52078: EOF
2018/07/26 10:33:36 http: TLS handshake error from 10.129.0.1:52086: EOF
2018/07/26 10:33:46 http: TLS handshake error from 10.129.0.1:52094: EOF
2018/07/26 10:33:56 http: TLS handshake error from 10.129.0.1:52102: EOF
level=info timestamp=2018-07-26T10:34:00.157425Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:34:06 http: TLS handshake error from 10.129.0.1:52110: EOF
2018/07/26 10:34:16 http: TLS handshake error from 10.129.0.1:52118: EOF
2018/07/26 10:34:26 http: TLS handshake error from 10.129.0.1:52126: EOF
level=info timestamp=2018-07-26T10:34:28.926424Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:34:30.129080Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:33:50.756363Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:33:50.758846Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:33:51.993176Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:33:56.660524Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:33:57 http: TLS handshake error from 10.129.0.1:58414: EOF
level=info timestamp=2018-07-26T10:34:02.045062Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:34:07 http: TLS handshake error from 10.129.0.1:58422: EOF
level=info timestamp=2018-07-26T10:34:12.106061Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:34:17 http: TLS handshake error from 10.129.0.1:58430: EOF
level=info timestamp=2018-07-26T10:34:20.892703Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:20.895803Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:22.178553Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:26.714189Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:34:27 http: TLS handshake error from 10.129.0.1:58438: EOF
level=info timestamp=2018-07-26T10:34:28.899567Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:28:30.281192Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirv2wv kind= uid=a43eeab6-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.295296Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:28:30.295379Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8kkf5 kind= uid=a440f6a4-90be-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:28:30.892407Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5"
level=info timestamp=2018-07-26T10:28:31.098244Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirv2wv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirv2wv"
level=info timestamp=2018-07-26T10:28:31.507281Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8kkf5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8kkf5"
level=info timestamp=2018-07-26T10:31:31.598431Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.598600Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.611366Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.611465Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.618880Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.618971Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.644290Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.644488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.678646Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibkcbp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibkcbp"
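Editor's note: the repeated "Operation cannot be fulfilled ... the object has been modified; please apply your changes to the latest version and try again" messages are ordinary optimistic-concurrency (resourceVersion) conflicts on update, which a controller resolves by re-reading the object and retrying. A sketch with client-go's retry helper, illustrative only (KubeVirt's controller resolves these by re-enqueuing through its own workqueue instead):

package main

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

// updateVMIStatus stands in for a get-modify-update round trip against the
// API server; a real implementation would re-fetch the object on each attempt.
func updateVMIStatus() error {
	fmt.Println("fetching latest VirtualMachineInstance and applying update")
	return nil // returning a 409 Conflict error here would trigger a retry
}

func main() {
	// RetryOnConflict re-runs the closure whenever it returns a Conflict
	// error, i.e. exactly the "object has been modified" failures logged above.
	if err := retry.RetryOnConflict(retry.DefaultRetry, updateVMIStatus); err != nil {
		fmt.Println("update failed:", err)
	}
}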
Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:32:19.986594Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:32:19.987732Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:32:19.987874Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:32:20.600952Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:32:20.614228Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:32:20.614531Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:32:20.614589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibkcbp kind= uid=105242a7-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:32:20.686481Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:32:20.694264Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:32:20.694930Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:32:20.694965Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:32:20.832584Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:32:20.847178Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:32:20.848477Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:32:20.848528Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmizl6jz kind= uid=1053b2db-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi9jlrv-vfkrb
Pod phase: Failed
goroutine 16 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42023e130, 0xc4201bcc80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421918040, 0xc4200b7c70, 0xc42029f080, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c060, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4219d7b40, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c060, 0xc42000e070, 0x13, 0xc421953f18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42169b300)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420353a40, 0xc4200ee1e0, 0xc4219a6e00, 0xc4219a6e10, 0xc421948300, 0xc42190cd20, 0x10f5e80, 0xc4200b7c70, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmibkcbp-2fzdv
Pod phase: Failed
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc420268170, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421924040, 0xc42000f280, 0xc42032b280, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c060, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc42029b980, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c060, 0xc42000e078, 0x13, 0xc421947f18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42048b280)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420334a80, 0xc4200d6230, 0xc421923510, 0xc421923520, 0xc421942300, 0xc421918d00, 0x10f5e80, 0xc42000f280, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmittlqj-4wqrg
Pod phase: Failed
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a6290, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42192e040, 0xc4200b7d58, 0xc42037cc60, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6600, 0xc4200b60a0, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420088280, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6600, 0xc4200b60a0, 0x13, 0xc42028bf18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4208393c0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420334080, 0xc4200b8460, 0xc421aac9a0, 0xc421aac9b0, 0xc421948300, 0xc421922d40, 0x10f5e80, 0xc4200b7d58, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmizl6jz-cxvzm
Pod phase: Failed
goroutine 33 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202040b0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421934040, 0xc42000ebf0, 0xc420251f60, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc4200b60a0, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4203545c0, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc4200b60a0, 0x13, 0xc42005df18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420354040, 0xc4200b85f0, 0xc4219dad30, 0xc4219dad40, 0xc421946300, 0xc421926d20, 0x10f5e80, 0xc42000ebf0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [181.348 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance with custom MAC address
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received.
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T10:31:32.180104Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmibkcbp-2fzdv"
level=info timestamp=2018-07-26T10:31:59.799043Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmibkcbp-2fzdv"
level=error timestamp=2018-07-26T10:32:00.498126Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
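Editor's note: the failure above comes from a Ginkgo table-driven spec: the assertion at utils.go:245 requires that no Warning event be seen while the VMI starts, and gomega renders the violated "not to equal" matcher as the Expected/not-to-equal block. A minimal sketch of that test shape, assuming the ginkgo extensions/table API vendored in the log's paths; the helper and entry parameter are hypothetical, not the suite's actual code:

package tests_test

import (
	"fmt"

	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/extensions/table"
	. "github.com/onsi/gomega"
)

// pingFromOutside is a hypothetical stand-in for the suite's reachability check.
func pingFromOutside(target string) error {
	fmt.Println("pinging", target)
	return nil
}

var _ = Describe("Networking", func() {
	// The run above failed here, in BeforeEach: VMI setup emitted a Warning
	// event, so no table entry body ever executed.
	BeforeEach(func() {
		// create the test VMIs and wait for them to start, failing the spec
		// on any Warning event (the utils.go:245 assertion in the log)
	})

	table.DescribeTable("should be able to reach", func(target string) {
		Expect(pingFromOutside(target)).To(Succeed())
	},
		table.Entry("the Inbound VirtualMachineInstance with custom MAC address", "inbound-custom-mac"),
	)
})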
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:33:16 http: TLS handshake error from 10.129.0.1:52070: EOF
2018/07/26 10:33:26 http: TLS handshake error from 10.129.0.1:52078: EOF
2018/07/26 10:33:36 http: TLS handshake error from 10.129.0.1:52086: EOF
2018/07/26 10:33:46 http: TLS handshake error from 10.129.0.1:52094: EOF
2018/07/26 10:33:56 http: TLS handshake error from 10.129.0.1:52102: EOF
level=info timestamp=2018-07-26T10:34:00.157425Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:34:06 http: TLS handshake error from 10.129.0.1:52110: EOF
2018/07/26 10:34:16 http: TLS handshake error from 10.129.0.1:52118: EOF
2018/07/26 10:34:26 http: TLS handshake error from 10.129.0.1:52126: EOF
level=info timestamp=2018-07-26T10:34:28.926424Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:34:30.129080Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:34:36 http: TLS handshake error from 10.129.0.1:52134: EOF
2018/07/26 10:34:46 http: TLS handshake error from 10.129.0.1:52142: EOF
2018/07/26 10:34:56 http: TLS handshake error from 10.129.0.1:52150: EOF
level=info timestamp=2018-07-26T10:35:00.300076Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:34:32.213728Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:34:37 http: TLS handshake error from 10.129.0.1:58446: EOF
level=info timestamp=2018-07-26T10:34:42.261328Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:44.356231Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:34:44.361293Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 10:34:47 http: TLS handshake error from 10.129.0.1:58454: EOF
level=info timestamp=2018-07-26T10:34:51.049349Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:51.135901Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:51.461303Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:52.062701Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:52.112053Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:52.352220Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:34:56.825343Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:34:57 http: TLS handshake error from 10.129.0.1:58462: EOF
level=info timestamp=2018-07-26T10:35:02.406610Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:31:31.618971Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.644290Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.644488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.678646Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibkcbp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibkcbp"
level=info timestamp=2018-07-26T10:34:33.364390Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.364559Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.385963Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.386152Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.467480Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.467689Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.531525Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.531671Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.604554Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibjksh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibjksh"
level=info timestamp=2018-07-26T10:34:33.888482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikdx4c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikdx4c"
level=info timestamp=2018-07-26T10:34:34.017204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis2bsh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis2bsh"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:35:02.064882Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:35:02.065093Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:35:02.065205Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibjksh"
level=info timestamp=2018-07-26T10:35:02.231813Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:35:02.232087Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:35:02.232225Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2mj7q"
level=info timestamp=2018-07-26T10:35:02.705504Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:35:02.705710Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:35:02.705811Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibjksh"
level=info timestamp=2018-07-26T10:35:02.789144Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:35:02.789397Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:35:02.789530Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikdx4c"
level=info timestamp=2018-07-26T10:35:02.864541Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:35:02.864770Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:35:02.864932Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmis2bsh"

Pod name: virt-launcher-testvmi2mj7q-g2n2m
Pod phase: Running
goroutine 24 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202c7de0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421908cc0, 0xc420464100, 0xc42037d220, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6c60, 0xc4200b6398, 0x13, 0x131ed4d, 0x4, 0xc420068f18, 0x3, 0x3, 0xc420089ac0, 0x6976656422202020, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6c60, 0xc4200b6398, 0x13, 0xc42025bf18, 0x3, 0x3, 0x2220202020202020, 0x6e5f656369766564, 0x5522203a22656d61)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42029c900, 0xc4200b8960, 0xc421648430, 0xc421648440, 0xc42190be00, 0xc4219140a0, 0x10f5e80, 0xc420464100, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmibjksh-f6bc5
Pod phase: Running
goroutine 15 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4204300c0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42193e040, 0xc42000ee08, 0xc42029c120, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c180, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200baac0, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c180, 0xc42000e078, 0x13, 0xc421932718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4200bbfc0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202b0100, 0xc4200e6370, 0xc421b5f780, 0xc421b5f790, 0xc42195c300, 0xc421928d80, 0x10f5e80, 0xc42000ee08, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmikdx4c-sd7c6
Pod phase: Running
goroutine 13 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a66a0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421914040, 0xc42000f4d0, 0xc420401740, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c240, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420088140, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c240, 0xc42000e078, 0x13, 0xc42190b718, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420404040, 0xc4200e6190, 0xc421a3c990, 0xc421a3c9a0, 0xc421938300, 0xc4218fed20, 0x10f5e80, 0xc42000f4d0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmis2bsh-6w8gm
Pod phase: Running
goroutine 15 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200465f0, 0xc42018ac80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421920040, 0xc4200d5120, 0xc4200d0de0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200ce540, 0xc4200d46e0, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc420480640, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200ce540, 0xc4200d46e0, 0x13, 0xc42194ef18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420480040, 0xc4200d2500, 0xc4204fc480, 0xc4204fc490, 0xc421948300, 0xc421914d00, 0x10f5e80, 0xc4200d5120, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
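Editor's note: every trace bottoms out in net/rpc frames because virt-launcher's cmd-server is a Go net/rpc server: virt-handler's Sync request arrives over a socket, net/rpc dispatches it to (*Launcher).Sync via reflection (the reflect.Value.Call frames), and the handler panics before replying, which is also why virt-handler then logs "connection is shut down" and "unexpected EOF". A minimal sketch of that server shape, with hypothetical types and socket path (not KubeVirt's actual cmd-server API):

package main

import (
	"fmt"
	"net"
	"net/rpc"
)

type SyncArgs struct{ VMIName string }
type SyncReply struct{ Success bool }

// Launcher mirrors the shape net/rpc requires: an exported method taking an
// args value and a reply pointer, returning error.
type Launcher struct{}

func (l *Launcher) Sync(args *SyncArgs, reply *SyncReply) error {
	// net/rpc does not recover panics in handlers: a panic here (like the
	// SIGSEGV above) kills the goroutine spawned by ServeCodec and with it
	// the whole process, closing the client's connection mid-call.
	reply.Success = true
	return nil
}

func main() {
	server := rpc.NewServer()
	if err := server.Register(&Launcher{}); err != nil {
		panic(err)
	}
	ln, err := net.Listen("unix", "/tmp/launcher-sock") // illustrative path
	if err != nil {
		panic(err)
	}
	fmt.Println("cmd-server listening")
	for {
		conn, err := ln.Accept()
		if err != nil {
			return
		}
		go server.ServeConn(conn) // one serving goroutine per connection
	}
}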
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:35:36 http: TLS handshake error from 10.129.0.1:52182: EOF
2018/07/26 10:35:46 http: TLS handshake error from 10.129.0.1:52190: EOF
2018/07/26 10:35:56 http: TLS handshake error from 10.129.0.1:52200: EOF
2018/07/26 10:36:06 http: TLS handshake error from 10.129.0.1:52208: EOF
2018/07/26 10:36:16 http: TLS handshake error from 10.129.0.1:52216: EOF
2018/07/26 10:36:26 http: TLS handshake error from 10.129.0.1:52224: EOF
level=info timestamp=2018-07-26T10:36:30.109635Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:36:36 http: TLS handshake error from 10.129.0.1:52232: EOF
2018/07/26 10:36:46 http: TLS handshake error from 10.129.0.1:52240: EOF
2018/07/26 10:36:56 http: TLS handshake error from 10.129.0.1:52248: EOF
level=info timestamp=2018-07-26T10:37:00.155747Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:37:06 http: TLS handshake error from 10.129.0.1:52256: EOF
2018/07/26 10:37:16 http: TLS handshake error from 10.129.0.1:52264: EOF
2018/07/26 10:37:26 http: TLS handshake error from 10.129.0.1:52272: EOF
level=info timestamp=2018-07-26T10:37:30.112757Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:36:51.373678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:36:51.425331Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:36:52.976313Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:36:57.076863Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:36:57 http: TLS handshake error from 10.129.0.1:58560: EOF
level=info timestamp=2018-07-26T10:37:03.017992Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:37:07 http: TLS handshake error from 10.129.0.1:58568: EOF
level=info timestamp=2018-07-26T10:37:13.054859Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:37:17 http: TLS handshake error from 10.129.0.1:58576: EOF
level=info timestamp=2018-07-26T10:37:21.463969Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:21.505445Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:23.101819Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:27.131422Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:37:27 http: TLS handshake error from 10.129.0.1:58584: EOF
level=info timestamp=2018-07-26T10:37:33.157501Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:31:31.618971Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9jlrv kind= uid=10561406-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.644290Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:31:31.644488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmittlqj kind= uid=10586690-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:31:31.678646Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibkcbp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibkcbp"
level=info timestamp=2018-07-26T10:34:33.364390Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.364559Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.385963Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.386152Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.467480Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.467689Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.531525Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:34:33.531671Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.604554Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibjksh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibjksh"
level=info timestamp=2018-07-26T10:34:33.888482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikdx4c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikdx4c"
level=info timestamp=2018-07-26T10:34:34.017204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis2bsh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis2bsh"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:35:20.723813Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:35:20.724155Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:35:20.724208Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikdx4c kind= uid=7cb29257-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:35:20.793994Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:35:20.804358Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:35:20.804435Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:35:20.804468Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmis2bsh kind= uid=7cab126c-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:35:21.435087Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:35:21.446771Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:35:21.447785Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:35:21.447835Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:35:21.909486Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:35:21.937505Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T10:35:21.938445Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:35:21.938510Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibjksh kind= uid=7ca8ebdc-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi2mj7q-g2n2m Pod phase: Failed goroutine 24 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202c7de0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421908cc0, 0xc420464100, 0xc42037d220, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc4200c6c60, 0xc4200b6398, 0x13, 0x131ed4d, 0x4, 0xc420068f18, 0x3, 0x3, 0xc420089ac0, 0x6976656422202020, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc4200c6c60, 0xc4200b6398, 0x13, 0xc42025bf18, 0x3, 0x3, 0x2220202020202020, 0x6e5f656369766564, 0x5522203a22656d61) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc42029c900, 0xc4200b8960, 0xc421648430, 0xc421648440, 0xc42190be00, 0xc4219140a0, 0x10f5e80, 0xc420464100, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 Pod name: virt-launcher-testvmibjksh-f6bc5 Pod phase: Failed goroutine 15 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4204300c0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42193e040, 0xc42000ee08, 0xc42029c120, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc42008c180, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200baac0, 0x114acc0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc42008c180, 0xc42000e078, 0x13, 0xc421932718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4200bbfc0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc4202b0100, 0xc4200e6370, 0xc421b5f780, 0xc421b5f790, 0xc42195c300, 0xc421928d80, 0x10f5e80, 0xc42000ee08, 0x16, 0x10f5f00, ...) 
/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 Pod name: virt-launcher-testvmikdx4c-sd7c6 Pod phase: Failed goroutine 13 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a66a0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421914040, 0xc42000f4d0, 0xc420401740, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc42008c240, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420088140, 0x0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc42008c240, 0xc42000e078, 0x13, 0xc42190b718, 0x3, 0x3, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc420404040, 0xc4200e6190, 0xc421a3c990, 0xc421a3c9a0, 0xc421938300, 0xc4218fed20, 0x10f5e80, 0xc42000f4d0, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 Pod name: virt-launcher-testvmis2bsh-6w8gm Pod phase: Failed goroutine 15 [running]: kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200465f0, 0xc42018ac80, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506 kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421920040, 0xc4200d5120, 0xc4200d0de0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7 reflect.Value.call(0xc4200ce540, 0xc4200d46e0, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc420480640, 0x0, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0xc4200ce540, 0xc4200d46e0, 0x13, 0xc42194ef18, 0x3, 0x3, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 net/rpc.(*service).call(0xc420480040, 0xc4200d2500, 0xc4204fc480, 0xc4204fc490, 0xc421948300, 0xc421914d00, 0x10f5e80, 0xc4200d5120, 0x16, 0x10f5f00, ...) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e created by net/rpc.(*Server).ServeCodec /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a virt-launcher exited with code 2 • Failure in Spec Setup (BeforeEach) [181.862 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the internet /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Unexpected Warning event received. 
Expected : Warning not to equal : Warning /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245 ------------------------------ level=info timestamp=2018-07-26T10:34:34.113300Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmibjksh-f6bc5" level=info timestamp=2018-07-26T10:35:01.367724Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmibjksh-f6bc5" level=error timestamp=2018-07-26T10:35:03.410700Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF" Pod name: disks-images-provider-9tfn5 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-wqlgh Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-85l9v Pod phase: Running 2018/07/26 10:36:16 http: TLS handshake error from 10.129.0.1:52216: EOF 2018/07/26 10:36:26 http: TLS handshake error from 10.129.0.1:52224: EOF level=info timestamp=2018-07-26T10:36:30.109635Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 10:36:36 http: TLS handshake error from 10.129.0.1:52232: EOF 2018/07/26 10:36:46 http: TLS handshake error from 10.129.0.1:52240: EOF 2018/07/26 10:36:56 http: TLS handshake error from 10.129.0.1:52248: EOF level=info timestamp=2018-07-26T10:37:00.155747Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 10:37:06 http: TLS handshake error from 10.129.0.1:52256: EOF 2018/07/26 10:37:16 http: TLS handshake error from 10.129.0.1:52264: EOF 2018/07/26 10:37:26 http: TLS handshake error from 10.129.0.1:52272: EOF level=info timestamp=2018-07-26T10:37:30.112757Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 10:37:36 http: TLS handshake error from 10.129.0.1:52280: EOF 2018/07/26 10:37:46 http: TLS handshake error from 10.129.0.1:52288: EOF 2018/07/26 10:37:56 http: TLS handshake error from 10.129.0.1:52296: EOF level=info timestamp=2018-07-26T10:38:00.638448Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-g7k67 Pod phase: Running 2018/07/26 10:37:27 http: TLS handshake error from 10.129.0.1:58584: EOF level=info timestamp=2018-07-26T10:37:33.157501Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:37:37 http: TLS handshake error from 10.129.0.1:58592: EOF level=info timestamp=2018-07-26T10:37:43.411324Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:37:44.379164Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T10:37:44.395104Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/26 10:37:47 http: TLS handshake error from 10.129.0.1:58600: EOF level=info 
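The failure above comes from the suite's event watcher: tests/utils.go appears to assert, while the VMI starts, that no event of type Warning is emitted for it, and the crashing virt-launcher pod produced a Warning carrying "unexpected EOF", so Gomega prints the literal comparison Warning-not-to-equal-Warning. A minimal sketch of that kind of check, assuming a recent client-go; the helper name failOnWarningEvents and the field selector are illustrative, not KubeVirt's actual helper:

    import (
        "context"
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
    )

    // failOnWarningEvents watches the events for one object and returns an
    // error as soon as a Warning-type event arrives, mirroring the assertion
    // that fails above with "Unexpected Warning event received".
    func failOnWarningEvents(ctx context.Context, c kubernetes.Interface, namespace, name string) error {
        w, err := c.CoreV1().Events(namespace).Watch(ctx, metav1.ListOptions{
            FieldSelector: "involvedObject.name=" + name,
        })
        if err != nil {
            return err
        }
        defer w.Stop()
        for ev := range w.ResultChan() {
            event, ok := ev.Object.(*corev1.Event)
            if !ok {
                continue
            }
            if event.Type == corev1.EventTypeWarning {
                return fmt.Errorf("unexpected warning event received: %s", event.Message)
            }
        }
        return nil
    }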
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:36:16 http: TLS handshake error from 10.129.0.1:52216: EOF
2018/07/26 10:36:26 http: TLS handshake error from 10.129.0.1:52224: EOF
level=info timestamp=2018-07-26T10:36:30.109635Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:36:36 http: TLS handshake error from 10.129.0.1:52232: EOF
2018/07/26 10:36:46 http: TLS handshake error from 10.129.0.1:52240: EOF
2018/07/26 10:36:56 http: TLS handshake error from 10.129.0.1:52248: EOF
level=info timestamp=2018-07-26T10:37:00.155747Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:37:06 http: TLS handshake error from 10.129.0.1:52256: EOF
2018/07/26 10:37:16 http: TLS handshake error from 10.129.0.1:52264: EOF
2018/07/26 10:37:26 http: TLS handshake error from 10.129.0.1:52272: EOF
level=info timestamp=2018-07-26T10:37:30.112757Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:37:36 http: TLS handshake error from 10.129.0.1:52280: EOF
2018/07/26 10:37:46 http: TLS handshake error from 10.129.0.1:52288: EOF
2018/07/26 10:37:56 http: TLS handshake error from 10.129.0.1:52296: EOF
level=info timestamp=2018-07-26T10:38:00.638448Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
2018/07/26 10:37:27 http: TLS handshake error from 10.129.0.1:58584: EOF
level=info timestamp=2018-07-26T10:37:33.157501Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:37:37 http: TLS handshake error from 10.129.0.1:58592: EOF
level=info timestamp=2018-07-26T10:37:43.411324Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:44.379164Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:37:44.395104Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 10:37:47 http: TLS handshake error from 10.129.0.1:58600: EOF
level=info timestamp=2018-07-26T10:37:52.212658Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:52.659392Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:52.993486Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:53.340423Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:53.365329Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:53.487318Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:37:57.220430Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:37:57 http: TLS handshake error from 10.129.0.1:58608: EOF
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:34:33.531671Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.604554Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibjksh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibjksh"
level=info timestamp=2018-07-26T10:34:33.888482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikdx4c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikdx4c"
level=info timestamp=2018-07-26T10:34:34.017204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis2bsh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis2bsh"
level=info timestamp=2018-07-26T10:37:35.025046Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.025216Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.046277Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.046383Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.061455Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.061537Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.074778Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.074827Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.125406Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdlbn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdlbn"
level=info timestamp=2018-07-26T10:37:35.860682Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
level=info timestamp=2018-07-26T10:37:36.290340Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:38:01.497536Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:38:01.497613Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:38:01.497684Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmijdlbn"
level=info timestamp=2018-07-26T10:38:01.540177Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:38:01.540391Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:38:01.540499Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifj9hk"
level=info timestamp=2018-07-26T10:38:01.540585Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:38:01.540664Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:38:01.540866Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmijdlbn"
level=info timestamp=2018-07-26T10:38:01.623626Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:38:01.623885Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:38:01.623996Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmijdlbn"
level=info timestamp=2018-07-26T10:38:01.624102Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:38:01.624208Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:38:01.624279Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifj9hk"
Pod name: virt-launcher-testvmidm8n4-wkq2k
Pod phase: Running
level=info timestamp=2018-07-26T10:37:43.816339Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T10:37:43.816622Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T10:37:43.818495Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-26T10:38:01.161585Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-26T10:38:01.287681Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidm8n4"
level=info timestamp=2018-07-26T10:38:01.289233Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-26T10:38:01.289476Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmifj9hk-9g6lx
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc420540130, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc420556000, 0xc42000ec80, 0xc421948b20, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096180, 0xc42000e088, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200ba280, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096180, 0xc42000e088, 0x13, 0xc42193ff18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4204026c0, 0xc4200e6280, 0xc4209b7730, 0xc4209b7740, 0xc421923e80, 0xc42000c0a0, 0x10f5e80, 0xc42000ec80, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
Pod name: virt-launcher-testvmijdlbn-qfr8k
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 32 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202a8780, 0xc4200f8500, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421910ce0, 0xc421aaa580, 0xc420311ec0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200ced80, 0xc4200d46c0, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc420089b40, 0x13, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200ced80, 0xc4200d46c0, 0x13, 0xc4206d8f18, 0x3, 0x3, 0x1, 0x4, 0xc420216620)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420282100, 0xc4200d2b90, 0xc42191d8b0, 0xc42191d8c0, 0xc421913d80, 0xc42191e0a0, 0x10f5e80, 0xc421aaa580, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
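Every crashed virt-launcher above dies at the same instruction, (*LibvirtDomainManager).SyncVMI at virtwrap/manager.go:163, with addr=0x0, right as virt-handler starts reporting "connection is shut down": the pattern points to a nil value being dereferenced once the libvirt connection has gone away. A minimal sketch of the defensive shape such code usually wants; every type, field, and method name below is an assumption for illustration, not KubeVirt's actual code:

    import (
        "errors"
        "sync"
    )

    // libvirtConn stands in for the real libvirt connection type (assumption).
    type libvirtConn struct{ closed bool }

    type domainManager struct {
        mu   sync.Mutex
        conn *libvirtConn // may be nil or closed after "connection is shut down"
    }

    // syncVMI guards the handle before use instead of dereferencing it
    // unconditionally, which is what a SIGSEGV at addr=0x0 suggests happened.
    func (m *domainManager) syncVMI() error {
        m.mu.Lock()
        defer m.mu.Unlock()
        if m.conn == nil || m.conn.closed {
            return errors.New("libvirt connection unavailable; reconnect and retry")
        }
        // ... proceed with the domain sync using m.conn ...
        return nil
    }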
Pod name: virt-launcher-testvmiw8zj2-zncxs
Pod phase: Running
level=info timestamp=2018-07-26T10:37:44.409890Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T10:37:44.410154Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T10:37:44.428507Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-26T10:37:59.645190Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-26T10:37:59.780050Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiw8zj2"
level=info timestamp=2018-07-26T10:37:59.782813Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-26T10:37:59.786334Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
level=info timestamp=2018-07-26T10:38:01.979217Z pos=cloud-init.go:254 component=virt-launcher msg="generated nocloud iso file /var/run/libvirt/kubevirt-ephemeral-disk/cloud-init-data/kubevirt-test-default/testvmiw8zj2/noCloud.iso"
level=error timestamp=2018-07-26T10:38:02.005353Z pos=common.go:126 component=virt-launcher msg="updated MAC for interface: eth0 - 0a:58:0a:be:e2:69"
level=info timestamp=2018-07-26T10:38:02.019623Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf"
level=info timestamp=2018-07-26T10:38:02.019728Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local"
level=info timestamp=2018-07-26T10:38:02.019945Z pos=dhcp.go:62 component=virt-launcher msg="Starting SingleClientDHCPServer"
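Note the mojibake in the converter.go:751 entry above: "Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" looks like a raw 4-byte IPv4 address written straight into the message instead of a formatted dotted quad ('B' is byte 0x42 = 66, 'f' is 0x66 = 102, and the two high octets are invalid as UTF-8, hence the replacement characters). A small sketch of the difference; the address bytes are hypothetical, chosen only to reproduce the symptom:

    package main

    import (
        "fmt"
        "net"
    )

    func main() {
        // Hypothetical raw nameserver bytes: 192 and 168 are not valid UTF-8
        // on their own, while 66 and 102 are the ASCII letters 'B' and 'f'.
        raw := []byte{192, 168, 66, 102}
        fmt.Println(string(raw))          // renders as "\ufffd\ufffdBf" in a UTF-8 log
        fmt.Println(net.IP(raw).String()) // "192.168.66.102"
    }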
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:38:36 http: TLS handshake error from 10.129.0.1:52328: EOF
2018/07/26 10:38:46 http: TLS handshake error from 10.129.0.1:52336: EOF
2018/07/26 10:38:56 http: TLS handshake error from 10.129.0.1:52346: EOF
level=info timestamp=2018-07-26T10:39:00.161251Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:39:06 http: TLS handshake error from 10.129.0.1:52354: EOF
2018/07/26 10:39:16 http: TLS handshake error from 10.129.0.1:52362: EOF
2018/07/26 10:39:26 http: TLS handshake error from 10.129.0.1:52370: EOF
2018/07/26 10:39:36 http: TLS handshake error from 10.129.0.1:52378: EOF
2018/07/26 10:39:46 http: TLS handshake error from 10.129.0.1:52386: EOF
2018/07/26 10:39:56 http: TLS handshake error from 10.129.0.1:52394: EOF
level=info timestamp=2018-07-26T10:40:00.199063Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:40:06 http: TLS handshake error from 10.129.0.1:52402: EOF
2018/07/26 10:40:16 http: TLS handshake error from 10.129.0.1:52410: EOF
2018/07/26 10:40:26 http: TLS handshake error from 10.129.0.1:52418: EOF
level=info timestamp=2018-07-26T10:40:30.129206Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:39:53.166951Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:39:53.172368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:39:54.130384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:39:57.435508Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:39:57 http: TLS handshake error from 10.129.0.1:58706: EOF
level=info timestamp=2018-07-26T10:40:04.174192Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:40:07 http: TLS handshake error from 10.129.0.1:58714: EOF
level=info timestamp=2018-07-26T10:40:14.224747Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:40:17 http: TLS handshake error from 10.129.0.1:58722: EOF
level=info timestamp=2018-07-26T10:40:23.254675Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:40:23.260426Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:40:24.268647Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:40:27.494271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:40:27 http: TLS handshake error from 10.129.0.1:58730: EOF
level=info timestamp=2018-07-26T10:40:34.330283Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:34:33.531671Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2mj7q kind= uid=7cbbc1bf-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:34:33.604554Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibjksh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibjksh"
level=info timestamp=2018-07-26T10:34:33.888482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikdx4c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikdx4c"
level=info timestamp=2018-07-26T10:34:34.017204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis2bsh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis2bsh"
level=info timestamp=2018-07-26T10:37:35.025046Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.025216Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.046277Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.046383Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.061455Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.061537Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.074778Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.074827Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.125406Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdlbn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdlbn"
level=info timestamp=2018-07-26T10:37:35.860682Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
level=info timestamp=2018-07-26T10:37:36.290340Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:38:21.961410Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:38:21.963280Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:38:21.963332Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmijdlbn kind= uid=e8f0bf5c-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:38:22.044000Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:38:22.056811Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:38:22.057840Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:38:22.057892Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifj9hk kind= uid=e8f338d4-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:38:23.013706Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:38:23.025860Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:38:23.025982Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:38:23.026032Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:38:23.030034Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:38:23.044757Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:38:23.045579Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:38:23.045632Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmidm8n4-wkq2k
Pod phase: Failed
goroutine 15 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42025a150, 0xc4201bac80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42192c040, 0xc4200b76c8, 0xc42035f700, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc42000e0b0, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4200ba080, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc42000e0b0, 0x13, 0xc421951718, 0x3, 0x3, 0x5cc549, 0xc4204e1bf0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4200ba200, 0xc4200d64b0, 0xc421a24260, 0xc421a24270, 0xc421948300, 0xc421920d40, 0x10f5e80, 0xc4200b76c8, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmifj9hk-9g6lx
Pod phase: Failed
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc420540130, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc420556000, 0xc42000ec80, 0xc421948b20, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096180, 0xc42000e088, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200ba280, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096180, 0xc42000e088, 0x13, 0xc42193ff18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4204026c0, 0xc4200e6280, 0xc4209b7730, 0xc4209b7740, 0xc421923e80, 0xc42000c0a0, 0x10f5e80, 0xc42000ec80, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmijdlbn-qfr8k
Pod phase: Failed
goroutine 32 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202a8780, 0xc4200f8500, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421910ce0, 0xc421aaa580, 0xc420311ec0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200ced80, 0xc4200d46c0, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc420089b40, 0x13, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200ced80, 0xc4200d46c0, 0x13, 0xc4206d8f18, 0x3, 0x3, 0x1, 0x4, 0xc420216620)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420282100, 0xc4200d2b90, 0xc42191d8b0, 0xc42191d8c0, 0xc421913d80, 0xc42191e0a0, 0x10f5e80, 0xc421aaa580, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmiw8zj2-zncxs
Pod phase: Failed
goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027c0d0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421916040, 0xc4200b7928, 0xc420342340, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c180, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420089680, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c180, 0xc42000e078, 0x13, 0xc420057f18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420089140, 0xc4200e6190, 0xc4219c8f70, 0xc4219c8f80, 0xc421932300, 0xc421908d40, 0x10f5e80, 0xc4200b7928, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [181.845 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on the same node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received.
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T10:37:35.600684Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmijdlbn-qfr8k"
level=info timestamp=2018-07-26T10:38:01.090225Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmijdlbn-qfr8k"
level=error timestamp=2018-07-26T10:38:01.878990Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
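The recurring "Operation cannot be fulfilled ... the object has been modified" lines in the controller logs are ordinary optimistic-concurrency conflicts: an update was submitted with a stale resourceVersion, the API server rejected it, and the controller re-enqueued the key, which is exactly what the msg field records. For one-shot updates the standard client-go idiom is to re-read and retry on conflict; a sketch of that idiom, shown with a ConfigMap and assuming a recent client-go (the KubeVirt client types follow the same resourceVersion rules but are not reproduced here):

    import (
        "context"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/util/retry"
    )

    // markTouched re-reads the object on every attempt so each Update carries
    // a fresh resourceVersion; a Conflict error triggers another Get+Update.
    func markTouched(ctx context.Context, c kubernetes.Interface, namespace, name string) error {
        return retry.RetryOnConflict(retry.DefaultRetry, func() error {
            cm, err := c.CoreV1().ConfigMaps(namespace).Get(ctx, name, metav1.GetOptions{})
            if err != nil {
                return err
            }
            if cm.Labels == nil {
                cm.Labels = map[string]string{}
            }
            cm.Labels["touched"] = "true"
            _, err = c.CoreV1().ConfigMaps(namespace).Update(ctx, cm, metav1.UpdateOptions{})
            return err
        })
    }

Informers mostly hide this class of error, which is why the controller simply logs the conflict at info level and re-enqueues rather than treating it as a failure.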
pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T10:40:44.397300Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:40:47 http: TLS handshake error from 10.129.0.1:58746: EOF level=info timestamp=2018-07-26T10:40:53.410421Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:40:53.410696Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:40:53.974615Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:40:54.290021Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:40:54.383406Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:40:54.648159Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:40:57.550306Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:40:57 http: TLS handshake error from 10.129.0.1:58754: EOF level=info timestamp=2018-07-26T10:41:00.954165Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T10:37:35.061537Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:37:35.074778Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:37:35.074827Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:37:35.125406Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdlbn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
level=info timestamp=2018-07-26T10:37:35.860682Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
level=info timestamp=2018-07-26T10:37:36.290340Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
level=info timestamp=2018-07-26T10:40:36.917515Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.917685Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:36.932185Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.932267Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:36.940257Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.940312Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:36.962618Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.962703Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:37.287268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizwmfj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizwmfj"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:41:03.552281Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:41:03.552439Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:41:03.552543Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6t77"
level=info timestamp=2018-07-26T10:41:03.595299Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:41:03.595539Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:41:03.595713Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6t77"
level=info timestamp=2018-07-26T10:41:03.615606Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:41:03.615816Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:41:03.615933Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizwmfj"
level=info timestamp=2018-07-26T10:41:03.630443Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:41:03.630668Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:41:03.630765Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmihf65c"
level=info timestamp=2018-07-26T10:41:03.636048Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:41:03.636233Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:41:03.636400Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6t77"

Pod name: virt-launcher-testvmihf65c-7zphk
Pod phase: Running
goroutine 15 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42024e0e0, 0xc4200f8280, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421916040, 0xc42000fb28, 0xc42041f240, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200b2120, 0xc42000e0b8, 0x13, 0x131ed4d, 0x4, 0xc421924f18, 0x3, 0x3, 0xc420481180, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200b2120, 0xc42000e0b8, 0x13, 0xc42042cf18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4206d8f00)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420480f00, 0xc4200aa4b0, 0xc4207b9600, 0xc4207b9610, 0xc421948300, 0xc421908d00, 0x10f5e80, 0xc42000fb28, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmij8ll7-2r2wp
Pod phase: Running
level=info timestamp=2018-07-26T10:40:44.805223Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T10:40:44.805481Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T10:40:44.815708Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-26T10:41:02.253246Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-26T10:41:02.316301Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmij8ll7"
level=info timestamp=2018-07-26T10:41:02.317970Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-26T10:41:02.318110Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmiv6t77-bqc4l
Pod phase: Running
goroutine 13 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027c0d0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421926040, 0xc4200b77b8, 0xc4202f09c0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4202ae780, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc42000e078, 0x13, 0xc42005d718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc420503b00)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42034e3c0, 0xc4200e63c0, 0xc4219254b0, 0xc4219254c0, 0xc42194c300, 0xc421918d00, 0x10f5e80, 0xc4200b77b8, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmizwmfj-726cd
Pod phase: Running
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42022a0f0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42191e040, 0xc42000feb0, 0xc4202d9680, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6660, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420088280, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6660, 0xc42000e070, 0x13, 0xc421917718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42065fc80)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420335980, 0xc4200d6280, 0xc4204ce340, 0xc4204ce350, 0xc42194c300, 0xc42190ad20, 0x10f5e80, 0xc42000feb0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:41:46 http: TLS handshake error from 10.129.0.1:52482: EOF
2018/07/26 10:41:56 http: TLS handshake error from 10.129.0.1:52492: EOF
level=info timestamp=2018-07-26T10:42:00.120655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:42:06 http: TLS handshake error from 10.129.0.1:52500: EOF
2018/07/26 10:42:16 http: TLS handshake error from 10.129.0.1:52508: EOF
2018/07/26 10:42:26 http: TLS handshake error from 10.129.0.1:52516: EOF
level=info timestamp=2018-07-26T10:42:30.156885Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:42:36 http: TLS handshake error from 10.129.0.1:52524: EOF
2018/07/26 10:42:46 http: TLS handshake error from 10.129.0.1:52532: EOF
2018/07/26 10:42:56 http: TLS handshake error from 10.129.0.1:52540: EOF
level=info timestamp=2018-07-26T10:43:00.158585Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:43:06 http: TLS handshake error from 10.129.0.1:52548: EOF
2018/07/26 10:43:16 http: TLS handshake error from 10.129.0.1:52556: EOF
2018/07/26 10:43:26 http: TLS handshake error from 10.129.0.1:52564: EOF
2018/07/26 10:43:36 http: TLS handshake error from 10.129.0.1:52572: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:42:53.745489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:42:55.505023Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:42:57.810799Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:42:57 http: TLS handshake error from 10.129.0.1:58852: EOF
level=info timestamp=2018-07-26T10:43:05.551738Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:43:07 http: TLS handshake error from 10.129.0.1:58860: EOF
level=info timestamp=2018-07-26T10:43:15.601702Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:43:17 http: TLS handshake error from 10.129.0.1:58868: EOF
level=info timestamp=2018-07-26T10:43:23.739668Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:23.800292Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:25.645723Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:27.856403Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:43:27 http: TLS handshake error from 10.129.0.1:58876: EOF
level=info timestamp=2018-07-26T10:43:30.155330Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:43:35.694354Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:37:35.061537Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8zj2 kind= uid=e8f5cb9c-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.074778Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:37:35.074827Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidm8n4 kind= uid=e8f7dc25-90bf-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:37:35.125406Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdlbn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdlbn"
level=info timestamp=2018-07-26T10:37:35.860682Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
level=info timestamp=2018-07-26T10:37:36.290340Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidm8n4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidm8n4"
level=info timestamp=2018-07-26T10:40:36.917515Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.917685Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:36.932185Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.932267Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:36.940257Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.940312Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:36.962618Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:40:36.962703Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:37.287268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizwmfj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizwmfj"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:41:22.861314Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:41:22.863004Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:41:22.863153Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmihf65c kind= uid=555f5cd8-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:41:23.781109Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:41:23.791091Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:41:23.791706Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:41:23.791835Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmizwmfj kind= uid=555d205b-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:41:24.054858Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:41:24.067599Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:41:24.068733Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:41:24.068787Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiv6t77 kind= uid=555aaeed-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:41:25.393753Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:41:25.410601Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:41:25.415613Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:41:25.415753Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmihf65c-7zphk
Pod phase: Failed
goroutine 15 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42024e0e0, 0xc4200f8280, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421916040, 0xc42000fb28, 0xc42041f240, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200b2120, 0xc42000e0b8, 0x13, 0x131ed4d, 0x4, 0xc421924f18, 0x3, 0x3, 0xc420481180, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200b2120, 0xc42000e0b8, 0x13, 0xc42042cf18, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4206d8f00)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420480f00, 0xc4200aa4b0, 0xc4207b9600, 0xc4207b9610, 0xc421948300, 0xc421908d00, 0x10f5e80, 0xc42000fb28, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmij8ll7-2r2wp
Pod phase: Failed
goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202284c0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42191ad00, 0xc4200b6bb8, 0xc420378880, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c600, 0xc42000e0f8, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc420089ac0, 0x10d25f5, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c600, 0xc42000e0f8, 0x13, 0xc4201fff18, 0x3, 0x3, 0x10ce4a6, 0x7, 0x10db1fa)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4200ba900, 0xc4200d6870, 0xc421924dd0, 0xc421924de0, 0xc42191dd80, 0xc4219260a0, 0x10f5e80, 0xc4200b6bb8, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmiv6t77-bqc4l
Pod phase: Failed
goroutine 13 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027c0d0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421926040, 0xc4200b77b8, 0xc4202f09c0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4202ae780, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc42000e078, 0x13, 0xc42005d718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc420503b00)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42034e3c0, 0xc4200e63c0, 0xc4219254b0, 0xc4219254c0, 0xc42194c300, 0xc421918d00, 0x10f5e80, 0xc4200b77b8, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmizwmfj-726cd
Pod phase: Failed
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42022a0f0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42191e040, 0xc42000feb0, 0xc4202d9680, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6660, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc420088280, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6660, 0xc42000e070, 0x13, 0xc421917718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc42065fc80)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420335980, 0xc4200d6280, 0xc4204ce340, 0xc4204ce350, 0xc42194c300, 0xc42190ad20, 0x10f5e80, 0xc42000feb0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [181.572 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on a different node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received.
    Expected
      : Warning
    not to equal
      : Warning
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T10:40:37.503230Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmiv6t77-bqc4l"
level=info timestamp=2018-07-26T10:41:03.509658Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmiv6t77-bqc4l"
level=error timestamp=2018-07-26T10:41:03.888620Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:42:06 http: TLS handshake error from 10.129.0.1:52500: EOF
2018/07/26 10:42:16 http: TLS handshake error from 10.129.0.1:52508: EOF
2018/07/26 10:42:26 http: TLS handshake error from 10.129.0.1:52516: EOF
level=info timestamp=2018-07-26T10:42:30.156885Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:42:36 http: TLS handshake error from 10.129.0.1:52524: EOF
2018/07/26 10:42:46 http: TLS handshake error from 10.129.0.1:52532: EOF
2018/07/26 10:42:56 http: TLS handshake error from 10.129.0.1:52540: EOF
level=info timestamp=2018-07-26T10:43:00.158585Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:43:06 http: TLS handshake error from 10.129.0.1:52548: EOF
2018/07/26 10:43:16 http: TLS handshake error from 10.129.0.1:52556: EOF
2018/07/26 10:43:26 http: TLS handshake error from 10.129.0.1:52564: EOF
2018/07/26 10:43:36 http: TLS handshake error from 10.129.0.1:52572: EOF
2018/07/26 10:43:46 http: TLS handshake error from 10.129.0.1:52580: EOF
2018/07/26 10:43:56 http: TLS handshake error from 10.129.0.1:52588: EOF
level=info timestamp=2018-07-26T10:44:00.447276Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:43:27.856403Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:43:27 http: TLS handshake error from 10.129.0.1:58876: EOF
level=info timestamp=2018-07-26T10:43:30.155330Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:43:35.694354Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:43:37 http: TLS handshake error from 10.129.0.1:58884: EOF
level=info timestamp=2018-07-26T10:43:45.758317Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:43:47 http: TLS handshake error from 10.129.0.1:58892: EOF
level=info timestamp=2018-07-26T10:43:53.787839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:54.227860Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:56.003892Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:56.004358Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:56.307099Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:43:56.341335Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:43:57 http: TLS handshake error from 10.129.0.1:58900: EOF
level=info timestamp=2018-07-26T10:43:58.083027Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:40:36.962703Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:40:37.287268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizwmfj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizwmfj"
level=info timestamp=2018-07-26T10:43:38.225946Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:43:38.226169Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:43:38.237567Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:43:38.237653Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:43:38.258062Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:43:38.258153Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:43:38.287423Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:43:38.287604Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:43:38.450092Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvkh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh"
level=info timestamp=2018-07-26T10:43:39.246148Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztfhc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztfhc"
level=info timestamp=2018-07-26T10:43:39.448415Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvkh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh"
level=info timestamp=2018-07-26T10:43:39.670496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf"
level=info timestamp=2018-07-26T10:43:40.050193Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
failed." level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:44:04.379538Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:44:04.379665Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T10:44:04.379817Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh" level=info timestamp=2018-07-26T10:44:04.384888Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:44:04.385040Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:44:04.385145Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh" level=info timestamp=2018-07-26T10:44:04.410812Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:44:04.411011Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:44:04.411114Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh" level=info timestamp=2018-07-26T10:44:04.416977Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:44:04.417078Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:44:04.417195Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf" level=info timestamp=2018-07-26T10:44:04.417334Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:44:04.417487Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-26T10:44:04.417598Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmicp8m5"

Pod name: virt-launcher-testvmi4sxsf-q496h
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]

goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202d65c0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42190ec40, 0xc4200b7d58, 0xc420393da0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096ba0, 0xc4200b6300, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc421a1c3c0, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096ba0, 0xc4200b6300, 0x13, 0xc4204fdf18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202ae600, 0xc4200b8820, 0xc4204cac40, 0xc4204cac50, 0xc421911d00, 0xc421928120, 0x10f5e80, 0xc4200b7d58, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: virt-launcher-testvmi4xvkh-r6jpk
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]

goroutine 14 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202dc0d0, 0xc4201b4c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc420298040, 0xc4200b7768, 0xc4209225a0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c180, 0xc42000e0a8, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200880c0, 0x1a, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c180, 0xc42000e0a8, 0x13, 0xc420475718, 0x3, 0x3, 0x5cc549, 0xc420594630, 0xc420437758)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42040c6c0, 0xc4200e64b0, 0xc421687620, 0xc421687630, 0xc4202ca300, 0xc42023de80, 0x10f5e80, 0xc4200b7768, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: virt-launcher-testvmicp8m5-4q9nk
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]

goroutine 12 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a65e0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421932040, 0xc4200b7320, 0xc4202a7ae0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc4200ba8c0, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc42000e078, 0x13, 0xc42194c718, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202b8140, 0xc4200e6370, 0xc420368100, 0xc420368110, 0xc421946300, 0xc421924d40, 0x10f5e80, 0xc4200b7320, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: virt-launcher-testvmiztfhc-qb9kk
Pod phase: Running
goroutine 32 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202f0470, 0xc4201b2c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421924ca0, 0xc42000edf0, 0xc421931b80, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6f60, 0xc4200b6318, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc42036a7c0, 0xc4204d2680, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6f60, 0xc4200b6318, 0x13, 0xc421976f18, 0x3, 0x3, 0xc4204d2b80, 0xc4204d2c00, 0xc4204d2c80)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42036a4c0, 0xc4200d6690, 0xc42192f440, 0xc42192f450, 0xc421927e00, 0xc4219300a0, 0x10f5e80, 0xc42000edf0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:44:46 http: TLS handshake error from 10.129.0.1:52628: EOF
2018/07/26 10:44:56 http: TLS handshake error from 10.129.0.1:52638: EOF
level=info timestamp=2018-07-26T10:45:00.146102Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:45:06 http: TLS handshake error from 10.129.0.1:52646: EOF
2018/07/26 10:45:16 http: TLS handshake error from 10.129.0.1:52654: EOF
2018/07/26 10:45:26 http: TLS handshake error from 10.129.0.1:52662: EOF
level=info timestamp=2018-07-26T10:45:30.161171Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:45:36 http: TLS handshake error from 10.129.0.1:52670: EOF
2018/07/26 10:45:46 http: TLS handshake error from 10.129.0.1:52678: EOF
2018/07/26 10:45:56 http: TLS handshake error from 10.129.0.1:52686: EOF
2018/07/26 10:46:06 http: TLS handshake error from 10.129.0.1:52694: EOF
2018/07/26 10:46:16 http: TLS handshake error from 10.129.0.1:52702: EOF
2018/07/26 10:46:26 http: TLS handshake error from 10.129.0.1:52710: EOF
level=info timestamp=2018-07-26T10:46:30.112562Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:46:36 http: TLS handshake error from 10.129.0.1:52718: EOF
name: virt-api-7d79764579-g7k67 Pod phase: Running level=info timestamp=2018-07-26T10:45:56.592476Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:45:57 http: TLS handshake error from 10.129.0.1:58998: EOF level=info timestamp=2018-07-26T10:45:58.417770Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:46:00.115042Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T10:46:06.660151Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:46:07 http: TLS handshake error from 10.129.0.1:59006: EOF level=info timestamp=2018-07-26T10:46:16.703839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:46:17 http: TLS handshake error from 10.129.0.1:59014: EOF level=info timestamp=2018-07-26T10:46:24.507367Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:46:24.796595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:46:26.749699Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:46:27 http: TLS handshake error from 10.129.0.1:59022: EOF level=info timestamp=2018-07-26T10:46:28.465207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T10:46:36.790470Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 10:46:37 http: TLS handshake error from 10.129.0.1:59030: EOF Pod name: virt-controller-7d57d96b65-frkzz Pod phase: Running level=info timestamp=2018-07-26T10:40:36.962703Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij8ll7 kind= uid=5560ebb3-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:40:37.287268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizwmfj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizwmfj" level=info timestamp=2018-07-26T10:43:38.225946Z pos=preset.go:139 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:43:38.226169Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:43:38.237567Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:43:38.237653Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:43:38.258062Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:43:38.258153Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:43:38.287423Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T10:43:38.287604Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T10:43:38.450092Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvkh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh" level=info timestamp=2018-07-26T10:43:39.246148Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztfhc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztfhc" level=info timestamp=2018-07-26T10:43:39.448415Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvkh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh" level=info timestamp=2018-07-26T10:43:39.670496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf" level=info timestamp=2018-07-26T10:43:40.050193Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io 
\"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf" Pod name: virt-controller-7d57d96b65-r4kvx Pod phase: Running level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-6qv78 Pod phase: Running level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4" level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qsw9k Pod phase: Running level=info timestamp=2018-07-26T10:44:24.757752Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:44:24.778607Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:44:24.778717Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:44:24.778764Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicp8m5 kind= uid=c16d3d41-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:44:24.783091Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:44:24.783159Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:44:24.783204Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4sxsf kind= uid=c174d678-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:44:24.877663Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:44:24.888035Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:44:24.888139Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:44:24.888182Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvkh kind= uid=c16ea2cf-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T10:44:25.052993Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T10:44:25.064266Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T10:44:25.065565Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:44:25.065654Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiztfhc kind= uid=c170349d-90c0-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi4sxsf-q496h
Pod phase: Failed
goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202d65c0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42190ec40, 0xc4200b7d58, 0xc420393da0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096ba0, 0xc4200b6300, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc421a1c3c0, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096ba0, 0xc4200b6300, 0x13, 0xc4204fdf18, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202ae600, 0xc4200b8820, 0xc4204cac40, 0xc4204cac50, 0xc421911d00, 0xc421928120, 0x10f5e80, 0xc4200b7d58, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmi4xvkh-r6jpk
Pod phase: Failed
goroutine 14 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202dc0d0, 0xc4201b4c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc420298040, 0xc4200b7768, 0xc4209225a0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c180, 0xc42000e0a8, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200880c0, 0x1a, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c180, 0xc42000e0a8, 0x13, 0xc420475718, 0x3, 0x3, 0x5cc549, 0xc420594630, 0xc420437758)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42040c6c0, 0xc4200e64b0, 0xc421687620, 0xc421687630, 0xc4202ca300, 0xc42023de80, 0x10f5e80, 0xc4200b7768, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmicp8m5-4q9nk
Pod phase: Failed
goroutine 12 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200a65e0, 0xc4201a8c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421932040, 0xc4200b7320, 0xc4202a7ae0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc4200ba8c0, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc42000e078, 0x13, 0xc42194c718, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202b8140, 0xc4200e6370, 0xc420368100, 0xc420368110, 0xc421946300, 0xc421924d40, 0x10f5e80, 0xc4200b7320, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmiztfhc-qb9kk
Pod phase: Failed
goroutine 32 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202f0470, 0xc4201b2c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421924ca0, 0xc42000edf0, 0xc421931b80, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200c6f60, 0xc4200b6318, 0x13, 0x131ed4d, 0x4, 0xc42006ef18, 0x3, 0x3, 0xc42036a7c0, 0xc4204d2680, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200c6f60, 0xc4200b6318, 0x13, 0xc421976f18, 0x3, 0x3, 0xc4204d2b80, 0xc4204d2c00, 0xc4204d2c80)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42036a4c0, 0xc4200d6690, 0xc42192f440, 0xc42192f450, 0xc421927e00, 0xc4219300a0, 0x10f5e80, 0xc42000edf0, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [181.490 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on the same node from Node
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received.
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T10:43:38.877941Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmicp8m5-4q9nk"
level=info timestamp=2018-07-26T10:44:03.865832Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmicp8m5-4q9nk"
level=error timestamp=2018-07-26T10:44:04.571314Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:45:06 http: TLS handshake error from 10.129.0.1:52646: EOF
2018/07/26 10:45:16 http: TLS handshake error from 10.129.0.1:52654: EOF
2018/07/26 10:45:26 http: TLS handshake error from 10.129.0.1:52662: EOF
level=info timestamp=2018-07-26T10:45:30.161171Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:45:36 http: TLS handshake error from 10.129.0.1:52670: EOF
2018/07/26 10:45:46 http: TLS handshake error from 10.129.0.1:52678: EOF
2018/07/26 10:45:56 http: TLS handshake error from 10.129.0.1:52686: EOF
2018/07/26 10:46:06 http: TLS handshake error from 10.129.0.1:52694: EOF
2018/07/26 10:46:16 http: TLS handshake error from 10.129.0.1:52702: EOF
2018/07/26 10:46:26 http: TLS handshake error from 10.129.0.1:52710: EOF
level=info timestamp=2018-07-26T10:46:30.112562Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:46:36 http: TLS handshake error from 10.129.0.1:52718: EOF
2018/07/26 10:46:46 http: TLS handshake error from 10.129.0.1:52726: EOF
2018/07/26 10:46:56 http: TLS handshake error from 10.129.0.1:52734: EOF
2018/07/26 10:47:06 http: TLS handshake error from 10.129.0.1:52742: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
2018/07/26 10:46:27 http: TLS handshake error from 10.129.0.1:59022: EOF
level=info timestamp=2018-07-26T10:46:28.465207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:46:36.790470Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:46:37 http: TLS handshake error from 10.129.0.1:59030: EOF
level=info timestamp=2018-07-26T10:46:47.249213Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:46:47 http: TLS handshake error from 10.129.0.1:59038: EOF
level=info timestamp=2018-07-26T10:46:54.623853Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:46:54.855296Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:46:57.060529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:46:57.131346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:46:57.219357Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:46:57.571709Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:46:57 http: TLS handshake error from 10.129.0.1:59046: EOF
level=info timestamp=2018-07-26T10:46:58.509552Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:47:01.048210Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:43:39.246148Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztfhc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztfhc"
level=info timestamp=2018-07-26T10:43:39.448415Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvkh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh"
level=info timestamp=2018-07-26T10:43:39.670496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf"
level=info timestamp=2018-07-26T10:43:40.050193Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf"
level=info timestamp=2018-07-26T10:46:40.144837Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.145182Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.374039Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.374206Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.406264Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.406413Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.417563Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.417631Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.479188Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim4n8p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim4n8p"
level=info timestamp=2018-07-26T10:46:40.614821Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim4n8p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim4n8p"
level=info timestamp=2018-07-26T10:46:41.219377Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5br2n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5br2n"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=error timestamp=2018-07-26T10:47:07.274976Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:47:07.275098Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5br2n"
level=info timestamp=2018-07-26T10:47:07.275224Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:47:07.275324Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:47:07.275427Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5br2n"
level=info timestamp=2018-07-26T10:47:07.302870Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:47:07.303603Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:47:07.303712Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5br2n"
level=info timestamp=2018-07-26T10:47:07.351025Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:47:07.351471Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:47:07.351673Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5br2n"
level=info timestamp=2018-07-26T10:47:07.363959Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:47:07.364433Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:47:07.364682Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmim4n8p"
level=error timestamp=2018-07-26T10:47:07.411749Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 reason="unexpected EOF" msg="Synchronizing the VirtualMachineInstance failed."
Pod name: virt-launcher-testvmi5br2n-jgh9k
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202d60b0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421954040, 0xc4200b7518, 0xc4202a0760, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096660, 0xc4200b6698, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc4200ba080, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096660, 0xc4200b6698, 0x13, 0xc421977f18, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420404040, 0xc4200b8780, 0xc4203c15f0, 0xc4203c1600, 0xc421970300, 0xc4219aa4a0, 0x10f5e80, 0xc4200b7518, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: virt-launcher-testvmifbsx7-cqwb5
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 15 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027c0d0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421918040, 0xc42000f430, 0xc42044cf20, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c1e0, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc421956040, 0x0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c1e0, 0xc42000e078, 0x13, 0xc42190cf18, 0x3, 0x3, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420406040, 0xc4200e6190, 0xc4202d4e00, 0xc4202d4e10, 0xc421944300, 0xc421900d40, 0x10f5e80, 0xc42000f430, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: virt-launcher-testvmim4n8p-qd74r
Pod phase: Running
goroutine 30 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42023a0d0, 0xc4201b4c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc4206a4040, 0xc420c644f0, 0xc420c84b00, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c060, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc42034a0c0, 0x24, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c060, 0xc42000e070, 0x13, 0xc4206c9718, 0x3, 0x3, 0x5cc549, 0xc4202b8510, 0xc42070f5f8)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42052e300, 0xc4200e61e0, 0xc420c7a010, 0xc420c7a020, 0xc4206c0300, 0xc42069a800, 0x10f5e80, 0xc420c644f0, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

Pod name: virt-launcher-testvmirt7jx-vn9gg
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200460c0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42192c040, 0xc4200b7620, 0xc420247860, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c1e0, 0xc42000e0b0, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200bb7c0, 0x114acc0, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c1e0, 0xc42000e0b0, 0x13, 0xc42023a718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4200bb640)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202aec00, 0xc4200e6460, 0xc420244070, 0xc420244090, 0xc421956300, 0xc42191ed60, 0x10f5e80, 0xc4200b7620, 0x16, 0x10f5f00, ...)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:47:46 http: TLS handshake error from 10.129.0.1:52774: EOF
2018/07/26 10:47:56 http: TLS handshake error from 10.129.0.1:52784: EOF
2018/07/26 10:48:06 http: TLS handshake error from 10.129.0.1:52792: EOF
2018/07/26 10:48:16 http: TLS handshake error from 10.129.0.1:52800: EOF
2018/07/26 10:48:26 http: TLS handshake error from 10.129.0.1:52808: EOF
level=info timestamp=2018-07-26T10:48:30.152695Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:48:36 http: TLS handshake error from 10.129.0.1:52816: EOF
2018/07/26 10:48:46 http: TLS handshake error from 10.129.0.1:52824: EOF
2018/07/26 10:48:56 http: TLS handshake error from 10.129.0.1:52832: EOF
level=info timestamp=2018-07-26T10:49:00.109631Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:49:06 http: TLS handshake error from 10.129.0.1:52840: EOF
2018/07/26 10:49:16 http: TLS handshake error from 10.129.0.1:52848: EOF
2018/07/26 10:49:26 http: TLS handshake error from 10.129.0.1:52856: EOF
level=info timestamp=2018-07-26T10:49:30.139384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:49:36 http: TLS handshake error from 10.129.0.1:52864: EOF

Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
level=info timestamp=2018-07-26T10:48:55.196417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:48:57 http: TLS handshake error from 10.129.0.1:59144: EOF
level=info timestamp=2018-07-26T10:48:58.156429Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:48:58.971347Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:49:07 http: TLS handshake error from 10.129.0.1:59152: EOF
level=info timestamp=2018-07-26T10:49:08.205108Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:49:17 http: TLS handshake error from 10.129.0.1:59160: EOF
level=info timestamp=2018-07-26T10:49:18.252862Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:25.146030Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:25.255148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:49:27 http: TLS handshake error from 10.129.0.1:59168: EOF
level=info timestamp=2018-07-26T10:49:28.303140Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:29.021282Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:49:37 http: TLS handshake error from 10.129.0.1:59176: EOF
level=info timestamp=2018-07-26T10:49:38.353237Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:43:39.246148Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztfhc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztfhc"
level=info timestamp=2018-07-26T10:43:39.448415Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvkh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvkh"
level=info timestamp=2018-07-26T10:43:39.670496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf"
level=info timestamp=2018-07-26T10:43:40.050193Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4sxsf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4sxsf"
level=info timestamp=2018-07-26T10:46:40.144837Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.145182Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.374039Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.374206Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.406264Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.406413Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.417563Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.417631Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.479188Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim4n8p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim4n8p"
level=info timestamp=2018-07-26T10:46:40.614821Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim4n8p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim4n8p"
level=info timestamp=2018-07-26T10:46:41.219377Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5br2n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5br2n"

Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:47:27.761939Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:47:27.762125Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:47:27.762179Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmim4n8p kind= uid=2ddbad2c-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:47:27.776167Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:47:27.785982Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:47:27.786134Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:47:27.786181Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:47:28.008812Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:47:28.021431Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:47:28.023323Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:47:28.023420Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:47:28.501267Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:47:28.512355Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:47:28.514063Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:47:28.514308Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirt7jx kind= uid=2ddd356e-90c1-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi5br2n-jgh9k
Pod phase: Failed
goroutine 29 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4202d60b0, 0xc42019ec80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421954040, 0xc4200b7518, 0xc4202a0760, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc420096660, 0xc4200b6698, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc4200ba080, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc420096660, 0xc4200b6698, 0x13, 0xc421977f18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420404040, 0xc4200b8780, 0xc4203c15f0, 0xc4203c1600, 0xc421970300, 0xc4219aa4a0, 0x10f5e80, 0xc4200b7518, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmifbsx7-cqwb5
Pod phase: Failed
goroutine 15 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42027c0d0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421918040, 0xc42000f430, 0xc42044cf20, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c1e0, 0xc42000e078, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc421956040, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c1e0, 0xc42000e078, 0x13, 0xc42190cf18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc420406040, 0xc4200e6190, 0xc4202d4e00, 0xc4202d4e10, 0xc421944300, 0xc421900d40, 0x10f5e80, 0xc42000f430, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmim4n8p-qd74r
Pod phase: Failed
goroutine 30 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42023a0d0, 0xc4201b4c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc4206a4040, 0xc420c644f0, 0xc420c84b00, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c060, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc420069f18, 0x3, 0x3, 0xc42034a0c0, 0x24, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c060, 0xc42000e070, 0x13, 0xc4206c9718, 0x3, 0x3, 0x5cc549, 0xc4202b8510, 0xc42070f5f8)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42052e300, 0xc4200e61e0, 0xc420c7a010, 0xc420c7a020, 0xc4206c0300, 0xc42069a800, 0x10f5e80, 0xc420c644f0, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmirt7jx-vn9gg
Pod phase: Failed
goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4200460c0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc42192c040, 0xc4200b7620, 0xc420247860, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c1e0, 0xc42000e0b0, 0x13, 0x131ed4d, 0x4, 0xc42006df18, 0x3, 0x3, 0xc4200bb7c0, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c1e0, 0xc42000e0b0, 0x13, 0xc42023a718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc4200bb640)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202aec00, 0xc4200e6460, 0xc420244070, 0xc420244090, 0xc421956300, 0xc42191ed60, 0x10f5e80, 0xc4200b7620, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2

• Failure in Spec Setup (BeforeEach) [181.997 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on a different node from Node
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received.
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
level=info timestamp=2018-07-26T10:46:40.973092Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmim4n8p-qd74r"
level=info timestamp=2018-07-26T10:47:06.962286Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmim4n8p-qd74r"
level=error timestamp=2018-07-26T10:47:07.553015Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="unexpected EOF"
Pod name: disks-images-provider-9tfn5
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-wqlgh
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-85l9v
Pod phase: Running
2018/07/26 10:48:26 http: TLS handshake error from 10.129.0.1:52808: EOF
level=info timestamp=2018-07-26T10:48:30.152695Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:48:36 http: TLS handshake error from 10.129.0.1:52816: EOF
2018/07/26 10:48:46 http: TLS handshake error from 10.129.0.1:52824: EOF
2018/07/26 10:48:56 http: TLS handshake error from 10.129.0.1:52832: EOF
level=info timestamp=2018-07-26T10:49:00.109631Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:49:06 http: TLS handshake error from 10.129.0.1:52840: EOF
2018/07/26 10:49:16 http: TLS handshake error from 10.129.0.1:52848: EOF
2018/07/26 10:49:26 http: TLS handshake error from 10.129.0.1:52856: EOF
level=info timestamp=2018-07-26T10:49:30.139384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:49:36 http: TLS handshake error from 10.129.0.1:52864: EOF
2018/07/26 10:49:46 http: TLS handshake error from 10.129.0.1:52872: EOF
2018/07/26 10:49:56 http: TLS handshake error from 10.129.0.1:52880: EOF
level=info timestamp=2018-07-26T10:50:01.405127Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 10:50:06 http: TLS handshake error from 10.129.0.1:52888: EOF
Pod name: virt-api-7d79764579-g7k67
Pod phase: Running
2018/07/26 10:49:37 http: TLS handshake error from 10.129.0.1:59176: EOF
level=info timestamp=2018-07-26T10:49:38.353237Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:44.075746Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T10:49:44.084127Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 10:49:47 http: TLS handshake error from 10.129.0.1:59184: EOF
level=info timestamp=2018-07-26T10:49:48.498050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:55.709073Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:55.791942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:57.928647Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:57.945369Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:49:57 http: TLS handshake error from 10.129.0.1:59192: EOF
level=info timestamp=2018-07-26T10:49:57.962142Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:58.682142Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T10:49:59.080641Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 10:50:07 http: TLS handshake error from 10.129.0.1:59200: EOF
Pod name: virt-controller-7d57d96b65-frkzz
Pod phase: Running
level=info timestamp=2018-07-26T10:46:40.406413Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifbsx7 kind= uid=2e0008d4-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.417563Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:46:40.417631Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5br2n kind= uid=2e03bdd7-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:46:40.479188Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim4n8p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim4n8p"
level=info timestamp=2018-07-26T10:46:40.614821Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmim4n8p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmim4n8p"
level=info timestamp=2018-07-26T10:46:41.219377Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5br2n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5br2n"
level=info timestamp=2018-07-26T10:49:41.914197Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:49:41.914386Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:49:41.925481Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminkvzd kind= uid=9a34dffd-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:49:41.925582Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminkvzd kind= uid=9a34dffd-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:49:41.943834Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7l84r kind= uid=9a36c435-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:49:41.943955Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7l84r kind= uid=9a36c435-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:49:41.966700Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4kgvm kind= uid=9a39d701-90c1-11e8-8e98-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T10:49:41.966795Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4kgvm kind= uid=9a39d701-90c1-11e8-8e98-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T10:49:42.619523Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7l84r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7l84r"
Pod name: virt-controller-7d57d96b65-r4kvx
Pod phase: Running
level=info timestamp=2018-07-26T09:19:31.543073Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-6qv78
Pod phase: Running
level=info timestamp=2018-07-26T10:25:53.333696Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:25:55.893991Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:25:55.894536Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:25:55.894690Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:01.015023Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:26:01.015731Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:26:01.016570Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fwm4"
level=info timestamp=2018-07-26T10:26:11.257627Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.272127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:26:11.275587Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T10:26:11.275946Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.259865Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.260482Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind= uid=380ddce7-90be-11e8-8e98-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T10:28:30.352702Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T10:28:30.355901Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9fwm4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qsw9k
Pod phase: Running
level=info timestamp=2018-07-26T10:50:08.116952Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:50:08.117087Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:50:08.117178Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimjl7t"
level=info timestamp=2018-07-26T10:50:08.122013Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:50:08.122139Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:50:08.122239Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimjl7t"
level=info timestamp=2018-07-26T10:50:08.142667Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:50:08.142889Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:50:08.143006Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimjl7t"
level=info timestamp=2018-07-26T10:50:08.146704Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi4kgvm kind= uid=9a39d701-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:50:08.146816Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmi4kgvm kind= uid=9a39d701-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:50:08.146926Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4kgvm"
level=info timestamp=2018-07-26T10:50:08.183536Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T10:50:08.183737Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmimjl7t kind= uid=9a334c71-90c1-11e8-8e98-525500d15501 reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T10:50:08.183840Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimjl7t"
Pod name: virt-launcher-testvmi4kgvm-44nbq
Pod phase: Running
goroutine 31 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42022c0f0, 0xc4201aac80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421936040, 0xc42000fe48, 0xc42037eb20, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc4200fe000, 0xc42000e070, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc420088080, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc4200fe000, 0xc42000e070, 0x13, 0xc42195ff18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202a1200, 0xc4200d4230, 0xc4219efa60, 0xc4219efa70, 0xc421954300, 0xc42192ad40, 0x10f5e80, 0xc42000fe48, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvmi7l84r-vnhjd
Pod phase: Running
level=info timestamp=2018-07-26T10:49:49.365767Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T10:49:49.369605Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T10:49:49.377216Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-26T10:50:05.856294Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-26T10:50:05.930526Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi7l84r"
level=info timestamp=2018-07-26T10:50:05.936013Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-26T10:50:05.936182Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmimjl7t-x7ksd
Pod phase: Running
goroutine 28 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc42029e0b0, 0xc4201bac80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc4203b20e0, 0xc420614470, 0xc421960d80, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c120, 0xc4200b60a0, 0x13, 0x131ed4d, 0x4, 0xc42006ff18, 0x3, 0x3, 0xc4200ba080, 0x0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c120, 0xc4200b60a0, 0x13, 0xc420058f18, 0x3, 0x3, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc4202d4040, 0xc4200b8500, 0xc42078f540, 0xc42078f550, 0xc421935d00, 0xc4204ac7a0, 0x10f5e80, 0xc420614470, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a
virt-launcher exited with code 2
Pod name: virt-launcher-testvminkvzd-8kh9t
Pod phase: Running
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x1061516]
goroutine 13 [running]:
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap.(*LibvirtDomainManager).SyncVMI(0xc4201fc0b0, 0xc4201a0c80, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/manager.go:163 +0x506
kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server.(*Launcher).Sync(0xc421936040, 0xc4204ea118, 0xc42000db60, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/cmd-server/server.go:66 +0xe7
reflect.Value.call(0xc42008c1e0, 0xc4200b6098, 0x13, 0x131ed4d, 0x4, 0xc42006af18, 0x3, 0x3, 0xc4200ba280, 0x114acc0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0xc42008c1e0, 0xc4200b6098, 0x13, 0xc42192c718, 0x3, 0x3, 0x126c760, 0x12b7901, 0xc421a0e1c0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
net/rpc.(*service).call(0xc42034e0c0, 0xc4200b85a0, 0xc4202fa0c0, 0xc4202fa0d0, 0xc42195a300, 0xc421920d20, 0x10f5e80, 0xc4204ea118, 0x16, 0x10f5f00, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:384 +0x14e
created by net/rpc.(*Server).ServeCodec
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/rpc/server.go:480 +0x43a

panic: test timed out after 1h30m0s

goroutine 10768 [running]:
testing.(*M).startAlarm.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc
created by time.goFunc
    /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44

goroutine 1 [chan receive, 90 minutes]:
testing.(*T).Run(0xc42063ae10, 0x139b503, 0x9, 0x142d848, 0x4801e6)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301
testing.runTests.func1(0xc42063ad20)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64
testing.tRunner(0xc42063ad20, 0xc4208f3df8)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
testing.runTests(0xc42011e840, 0x1d2da50, 0x1, 0x1, 0x412009)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4
testing.(*M).Run(0xc42094a880, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171
main.main()
    _testmain.go:44 +0x151

goroutine 5 [chan receive]:
kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1d59280)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b
created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203

goroutine 6 [syscall, 90 minutes]:
os/signal.signal_recv(0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6
os/signal.loop()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22
created by os/signal.init.0
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41

goroutine 10 [sleep]:
time.Sleep(0xb0b5119)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/time.go:102 +0x166
kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol.realClock.Sleep(0xb0b5119)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol/throttle.go:66 +0x2b
kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol.(*tokenBucketRateLimiter).Accept(0xc42062ebc0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol/throttle.go:91 +0xbd
kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest.(*Request).tryThrottle(0xc420834a80)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/request.go:478 +0x1fd
kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest.(*Request).Do(0xc420834a80, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/request.go:733 +0x62
kubevirt.io/kubevirt/pkg/kubecli.(*vmis).Get(0xc4204efb60, 0xc420c9bc40, 0xc, 0xc420b52d00, 0xc4204efb60, 0x8, 0x7ff1bef4a458)
    /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:369 +0x125
kubevirt.io/kubevirt/tests.waitForVMIStart.func1(0x0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1039 +0xc2
reflect.Value.call(0x11881e0, 0xc4204419b0, 0x13, 0x13955ce, 0x4, 0xc420cb6cb0, 0x0, 0x0, 0x11881e0, 0x11881e0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0x11881e0, 0xc4204419b0, 0x13, 0xc420cb6cb0, 0x0, 0x0, 0x44b21b, 0xc42084a0a8, 0xc420cb6ce8)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).pollActual(0xc420540b00, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:71 +0x9f
kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).match(0xc420540b00, 0x14c1340, 0xc420604640, 0x412801, 0xc420604670, 0x1, 0x1, 0xc420604670)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:141 +0x305
kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).Should(0xc420540b00, 0x14c1340, 0xc420604640, 0xc420604670, 0x1, 0x1, 0xc420540b00)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:48 +0x62
kubevirt.io/kubevirt/tests.waitForVMIStart(0x14b7060, 0xc420d10280, 0x5a, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1049 +0x6d6
kubevirt.io/kubevirt/tests.WaitForSuccessfulVMIStart(0x14b7060, 0xc420d10280, 0xc4204d8500, 0xc420c35118)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1072 +0x43
kubevirt.io/kubevirt/tests_test.glob..func18.3(0xc420d10280, 0x142d820, 0x1d77938)
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:86 +0x4d
kubevirt.io/kubevirt/tests_test.glob..func18.5()
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:150 +0x745
kubevirt.io/kubevirt/tests.BeforeAll.func1()
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1343 +0x3f
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc420949da0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113 +0x9c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc420949da0, 0xc4207f1a30, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:64 +0x13e
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*SetupNode).Run(0xc4203f0228, 0x14b3500, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes.go:15 +0x7f
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc4208bc2d0, 0x0, 0x14b3500, 0xc420059500)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:181 +0x1f1
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc4208bc2d0, 0x14b3500, 0xc420059500)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:138 +0xff
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc42032d180, 0xc4208bc2d0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:200 +0x10d
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc42032d180, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:170 +0x329
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc42032d180, 0xb)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:66 +0x11b
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc4200ceaf0, 0x7ff1bef0e5f8, 0xc42063ae10, 0x139dae6, 0xb, 0xc42011e9a0, 0x2, 0x2, 0x14cfd80, 0xc420059500, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x14b4560, 0xc42063ae10, 0x139dae6, 0xb, 0xc42011e900, 0x2, 0x2, 0x2)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:221 +0x258
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x14b4560, 0xc42063ae10, 0x139dae6, 0xb, 0xc420366810, 0x1, 0x1, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:209 +0xab
kubevirt.io/kubevirt/tests_test.TestTests(0xc42063ae10)
    /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa
testing.tRunner(0xc42063ae10, 0x142d848)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
created by testing.(*T).Run
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0

goroutine 11 [chan receive, 90 minutes]:
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc42032d180, 0xc420049ce0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:223 +0xd1
created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:60 +0x88

goroutine 12 [select, 90 minutes, locked to thread]:
runtime.gopark(0x142fa20, 0x0, 0x1398025, 0x6, 0x18, 0x1)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a
runtime.selectgo(0xc42047d750, 0xc420049da0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50
runtime.ensureSigM.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4
runtime.goexit()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1

goroutine 27 [IO wait]:
internal/poll.runtime_pollWait(0x7ff1beeedf00, 0x72, 0xc420693850)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57
internal/poll.(*pollDesc).wait(0xc420a26398, 0x72, 0xffffffffffffff00, 0x14b5720, 0x1c447d0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b
internal/poll.(*pollDesc).waitRead(0xc420a26398, 0xc4206f8000, 0x8000, 0x8000)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d
internal/poll.(*FD).Read(0xc420a26380, 0xc4206f8000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d
net.(*netFD).Read(0xc420a26380, 0xc4206f8000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f
net.(*conn).Read(0xc4203f03a8, 0xc4206f8000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a
crypto/tls.(*block).readFromUntil(0xc4206397d0, 0x7ff1bee0ddb0, 0xc4203f03a8, 0x5, 0xc4203f03a8, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96
crypto/tls.(*Conn).readRecord(0xc420698000, 0x142fb17, 0xc420698120, 0x20)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0
crypto/tls.(*Conn).Read(0xc420698000, 0xc4203a1000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100
bufio.(*Reader).Read(0xc420a10600, 0xc420a14498, 0x9, 0x9, 0xc4204ef798, 0xc4207c54c0, 0xc420693d10)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238
io.ReadAtLeast(0x14b2300, 0xc420a10600, 0xc420a14498, 0x9, 0x9, 0x9, 0xc420693ce0, 0xc420693ce0, 0x406614)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86
io.ReadFull(0x14b2300, 0xc420a10600, 0xc420a14498, 0x9, 0x9, 0xc4204ef740, 0xc420693d10, 0xc400005101)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc420a14498, 0x9, 0x9, 0x14b2300, 0xc420a10600, 0x0, 0xc400000000, 0x7efa2d, 0xc420693fb0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc420a14460, 0xc4208849c0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc420693fb0, 0x142e7a0, 0xc4204777b0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc4202516c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76
created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651

goroutine 107 [chan send, 89 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4207cd830)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 1205 [chan send, 83 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42098d530)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 10028 [chan send, 2 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420440ba0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 2226 [chan send, 77 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420900240)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 6345 [chan send, 26 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4203a6cf0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 659 [chan send, 86 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4203415c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 1692 [chan send, 80 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4207dd380)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 8435 [chan send, 14 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420440330)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 8933 [chan send, 11 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420860e70)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 7888 [chan send, 17 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4205ecbd0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 9461 [chan send, 8 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420523ad0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 7354 [chan send, 20 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42033bfb0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 6830 [chan send, 23 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4204090e0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 2713 [chan send, 74 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4204bad80)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 9980 [chan send, 5 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420a95950)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

make: *** [functest] Error 2
+ make cluster-down
./cluster/down.sh