+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ echo 1
automation/test.sh: line 46: /proc/sys/net/bridge/bridge-nf-call-iptables: Permission denied
+ true
+ echo 1
automation/test.sh: line 47: /proc/sys/net/ipv4/ip_forward: Permission denied
+ true
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/07/25 07:53:14 Waiting for host: 192.168.66.102:22
2018/07/25 07:53:17 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/25 07:53:25 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/25 07:53:33 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/25 07:53:41 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/25 07:53:46 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987
e183c40c07dc
861f604efed4
12902ad26342
028539b1f68b
bd6f07c1906c
d1f95a33a226
c43f96b6da26
e007e5cfd226
b42e2bceca6e
00531aec6f9a
e4ad39ba6cef
504c3df6bbf4
eb1ec0b445ce
b8955b91e8e5
f739ed8f3e59
07668d85ab3a
a6045d125d7b
2ce17110e009
b45f64ab28ef
3a15945be9e1
2a0af99ae1d1
0ece927846d7
0202d5f5dfae
8ce743769d8f
2efb36567bd8
96b65c0493c5
e9ce89fa30e3
2018/07/25 07:53:49 Waiting for host: 192.168.66.101:22
2018/07/25 07:53:52 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/25 07:54:00 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/25 07:54:08 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/25 07:54:14 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused. Sleeping 5s
2018/07/25 07:54:19 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=2.33 ms

--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 2.339/2.339/2.339/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable

--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms

+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)

TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_lb_to_config] ************************************************

TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_glusterfs_to_config] *****************************************

TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

PLAY [Ensure there are new_nodes] **********************************************

TASK [fail] ********************************************************************
skipping: [localhost]

TASK [fail] ********************************************************************
skipping: [localhost]

PLAY [Initialization Checkpoint Start] *****************************************

TASK [Set install initialization 'In Progress'] ********************************
ok: [node02]

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)

TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_lb_to_config] ************************************************

TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_glusterfs_to_config] *****************************************

TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)
 [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config

PLAY [Ensure that all non-node hosts are accessible] ***************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

PLAY [Initialize basic host facts] *********************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02

TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] ***
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : debug] ************************************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : set_stats] ********************************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] ***
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02

TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : set_fact] *********************************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : Standardize on latest variable names] *****
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : Normalize openshift_release] **************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02

TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : set_fact] *********************************
skipping: [node01] => (item=openshift_hosted_etcd_storage_kind)
skipping: [node02] => (item=openshift_hosted_etcd_storage_kind)

TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : At least one master is schedulable] *******
skipping: [node01]
skipping: [node02]

TASK [Detecting Operating System from ostree_booted] ***************************
ok: [node02]
ok: [node01]

TASK [set openshift_deployment_type if unset] **********************************
skipping: [node01]
skipping: [node02]

TASK [check for node already bootstrapped] *************************************
ok: [node01]
ok: [node02]

TASK [initialize_facts set fact openshift_is_bootstrapped] *********************
ok: [node02]
ok: [node01]

TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] ***
ok: [node01]
ok: [node02]

TASK [Determine Atomic Host Docker Version] ************************************
skipping: [node01]
skipping: [node02]

TASK [assert atomic host docker version is 1.12 or later] **********************
skipping: [node01]
skipping: [node02]

PLAY [Retrieve existing master configs and validate] ***************************

TASK [openshift_control_plane : stat] ******************************************
ok: [node01]

TASK [openshift_control_plane : slurp] *****************************************
ok: [node01]

TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]

TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] ***
ok: [node01]

TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Initialize special first-master variables] *******************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Disable web console if required] *****************************************

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Setup yum repositories for all hosts] ************************************

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]

TASK [rhel_subscribe : Install Red Hat Subscription manager] *******************
skipping: [node02]

TASK [rhel_subscribe : Is host already registered?] ****************************
skipping: [node02]

TASK [rhel_subscribe : Register host] ******************************************
skipping: [node02]

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]

TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] ***********
skipping: [node02]

TASK [rhel_subscribe : Attach to OpenShift Pool] *******************************
skipping: [node02]

TASK [rhel_subscribe : Satellite preparation] **********************************
skipping: [node02]

TASK [openshift_repos : openshift_repos detect ostree] *************************
ok: [node02]

TASK [openshift_repos : Ensure libselinux-python is installed] *****************
ok: [node02]

TASK [openshift_repos : Remove openshift_additional.repo file] *****************
ok: [node02]

TASK [openshift_repos : Create any additional repos that are defined] **********

TASK [openshift_repos : include_tasks] *****************************************
skipping: [node02]

TASK [openshift_repos : include_tasks] *****************************************
included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02

TASK [openshift_repos : Configure origin gpg keys] *****************************
ok: [node02]

TASK [openshift_repos : Configure correct origin release repository] ***********
ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2)

TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] ***
changed: [node02] => {
    "msg": "First run of openshift_repos"
}

TASK [openshift_repos : Record that openshift_repos already ran] ***************
ok: [node02]

RUNNING HANDLER [openshift_repos : refresh cache] ******************************
changed: [node02]

PLAY [Install packages necessary for installer] ********************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [Determine if chrony is installed] ****************************************
changed: [node02]
 [WARNING]: Consider using the yum, dnf or zypper module rather than running
rpm. If you need to use command because yum, dnf or zypper is insufficient you
can add warn=False to this command task or set command_warnings=False in
ansible.cfg to get rid of this message.
TASK [Install ntp package] *****************************************************
skipping: [node02]

TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]

TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=python-docker)

PLAY [Initialize cluster facts] ************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]

TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]

TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node01]
ok: [node02]

TASK [Gather Cluster facts] ****************************************************
changed: [node01]
changed: [node02]

TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]

TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]

PLAY [Initialize etcd host variables] ******************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [include_role : openshift_version] ****************************************

TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]

TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0"
}

TASK [openshift_version : set_fact] ********************************************
ok: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}

TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Verify Requirements] *****************************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [Run variable sanity checks] **********************************************
ok: [node01]

TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]

PLAY [Initialization Checkpoint End] *******************************************

TASK [Set install initialization 'Complete'] ***********************************
ok: [node02]

PLAY [Validate node hostnames] *************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [Query DNS for IP address of node02] **************************************
ok: [node02]

TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]

TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]

PLAY [Configure os_firewall] ***************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]

TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]

TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]

TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]

TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)

TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]

TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]

TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] ***
skipping: [node02]

TASK [os_firewall : Restart polkitd] *******************************************
skipping: [node02]

TASK [os_firewall : Wait for polkit action to have been created] ***************
skipping: [node02]

TASK [os_firewall : Ensure firewalld service is not enabled] *******************
ok: [node02]

TASK [os_firewall : Wait 10 seconds after disabling firewalld] *****************
skipping: [node02]

TASK [os_firewall : Install iptables packages] *********************************
ok: [node02] => (item=iptables)
ok: [node02] => (item=iptables-services)

TASK [os_firewall : Start and enable iptables service] *************************
ok: [node02 -> node02] => (item=node02)

TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] ***
skipping: [node02]

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [container_runtime : Setup the docker-storage for overlay] ****************
skipping: [node02]

TASK [container_runtime : Create file system on extra volume device] ***********

TASK [container_runtime : Create mount entry for extra volume] *****************

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [openshift_excluder : Install docker excluder - yum] **********************
ok: [node02]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]

TASK [openshift_excluder : set_fact] *******************************************
ok: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : Enable openshift excluder] **************************
skipping: [node02]

TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]

TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]

TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]

TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]

TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ******************
skipping: [node02]

TASK [container_runtime : Add https_proxy to /etc/atomic.conf] *****************
skipping: [node02]

TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ********************
skipping: [node02]

TASK [container_runtime : Get current installed Docker version] ****************
ok: [node02]

TASK [container_runtime : Error out if Docker pre-installed but too old] *******
skipping: [node02]

TASK [container_runtime : Error out if requested Docker is too old] ************
skipping: [node02]

TASK [container_runtime : Install Docker] **************************************
skipping: [node02]

TASK [container_runtime : Ensure docker.service.d directory exists] ************
ok: [node02]

TASK [container_runtime : Configure Docker service unit file] ******************
ok: [node02]

TASK [container_runtime : stat] ************************************************
ok: [node02]

TASK [container_runtime : Set registry params] *********************************
skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []})

TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] ***
skipping: [node02]

TASK [container_runtime : Set Proxy Settings] **********************************
skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''})

TASK [container_runtime : Set various Docker options] **************************
ok: [node02]

TASK [container_runtime : stat] ************************************************
ok: [node02]

TASK [container_runtime : Configure Docker Network OPTIONS] ********************
ok: [node02]

TASK [container_runtime : Detect if docker is already started] *****************
ok: [node02]

TASK [container_runtime : Start the Docker service] ****************************
ok: [node02]

TASK [container_runtime : set_fact] ********************************************
ok: [node02]

TASK [container_runtime : Check for docker_storage_path/overlay2] **************
ok: [node02]

TASK [container_runtime : Fixup SELinux permissions for docker] ****************
changed: [node02]

TASK [container_runtime : Ensure /var/lib/containers exists] *******************
ok: [node02]

TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
ok: [node02]

TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]

TASK [container_runtime : stat the docker data dir] ****************************
ok: [node02]

TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]

TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]

TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]

TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]

TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]

TASK [container_runtime : Setup the link] **************************************
skipping: [node02]

TASK [container_runtime : start docker] ****************************************
skipping: [node02]

TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
skipping: [node02]

TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]

TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]

TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]

TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]

TASK [container_runtime : Check that overlay is in the kernel] *****************
skipping: [node02]

TASK [container_runtime : Add overlay to modprobe.d] ***************************
skipping: [node02]

TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
skipping: [node02]

TASK [container_runtime : Enable and start systemd-modules-load] ***************
skipping: [node02]

TASK [container_runtime : Install cri-o] ***************************************
skipping: [node02]

TASK [container_runtime : Remove CRI-O default configuration files] ************
skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf)
skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf)

TASK [container_runtime : Create the CRI-O configuration] **********************
skipping: [node02]

TASK [container_runtime : Ensure CNI configuration directory exists] ***********
skipping: [node02]

TASK [container_runtime : Add iptables allow rules] ****************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})

TASK [container_runtime : Remove iptables rules] *******************************

TASK [container_runtime : Add firewalld allow rules] ***************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})

TASK [container_runtime : Remove firewalld allow rules] ************************

TASK [container_runtime : Configure the CNI network] ***************************
skipping: [node02]

TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
skipping: [node02]

TASK [container_runtime : Start the CRI-O service] *****************************
skipping: [node02]

TASK [container_runtime : Ensure /var/lib/containers exists] *******************
skipping: [node02]

TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
skipping: [node02]

TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]

TASK [container_runtime : stat the docker data dir] ****************************
skipping: [node02]

TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]

TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]

TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]

TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]

TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]

TASK [container_runtime : Setup the link] **************************************
skipping: [node02]

TASK [container_runtime : start docker] ****************************************
skipping: [node02]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [include_role : openshift_version] ****************************************

TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
skipping: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}

TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Node Preparation Checkpoint Start] ***************************************

TASK [Set Node preparation 'In Progress'] **************************************
ok: [node02]

PLAY [Only target nodes that have not yet been bootstrapped] *******************

TASK [Gathering Facts] *********************************************************
ok: [localhost]

TASK [add_host] ****************************************************************
skipping: [localhost] => (item=node02)
ok: [localhost] => (item=node01)

PLAY [Disable excluders] *******************************************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]

TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_docker_excluder": true
}

TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}

TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]

TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]

TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02

TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]

TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]

TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]

TASK [openshift_excluder : origin-docker-excluder version detected] ************
skipping: [node02]

TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]

TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] ***
skipping: [node02]

TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]

TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]

TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]

TASK [openshift_excluder : origin-excluder version detected] *******************
skipping: [node02]

TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]

TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] ***
skipping: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : disable docker excluder] ****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]

TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]

TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : disable docker excluder] ****************************
skipping: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]

PLAY [Configure nodes] *********************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [openshift_cloud_provider : Set cloud provider facts] *********************
skipping: [node02]

TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
skipping: [node02]

TASK [openshift_cloud_provider : include the defined cloud provider files] *****
skipping: [node02]

TASK [openshift_node : fail] ***************************************************
skipping: [node02]

TASK [openshift_node : Check for NetworkManager service] ***********************
ok: [node02]

TASK [openshift_node : Set fact using_network_manager] *************************
ok: [node02]

TASK [openshift_node : Install dnsmasq] ****************************************
ok: [node02]

TASK [openshift_node : ensure origin/node directory exists] ********************
changed: [node02] => (item=/etc/origin)
changed: [node02] => (item=/etc/origin/node)

TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
skipping: [node02]

TASK [openshift_node : Install network manager dispatch script] ****************
skipping: [node02]

TASK [openshift_node : Install dnsmasq configuration] **************************
ok: [node02]

TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
skipping: [node02]

TASK [openshift_node : Enable dnsmasq] *****************************************
ok: [node02]

TASK [openshift_node : Install network manager dispatch script] ****************
ok: [node02]

TASK [openshift_node : Add iptables allow rules] *******************************
ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})

TASK [openshift_node : Remove iptables rules] **********************************

TASK [openshift_node : Add firewalld allow rules] ******************************
skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})

TASK [openshift_node : Remove firewalld allow rules] ***************************

TASK [openshift_node : Checking for journald.conf] *****************************
ok: [node02]

TASK [openshift_node : Create journald persistence directories] ****************
ok: [node02]

TASK [openshift_node : Update journald setup] **********************************
ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'})
ok: [node02] => (item={u'var': u'Compress', u'val': True})
ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000})
ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
ok: [node02] => (item={u'var': u'ForwardToSyslog', u'val': False})
ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False})

TASK [openshift_node : Restart journald] ***************************************
skipping: [node02]

TASK [openshift_node : Disable swap] *******************************************
ok: [node02]

TASK [openshift_node : Install node, clients, and conntrack packages] **********
ok: [node02] => (item={u'name': u'origin-node-3.10.0*'})
ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'})
ok: [node02] => (item={u'name': u'conntrack-tools'})

TASK [openshift_node : Restart cri-o] ******************************************
skipping: [node02]

TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
changed: [node02]

TASK [openshift_node : sysctl] *************************************************
ok: [node02]

TASK [openshift_node : Check for credentials file for registry auth] ***********
skipping: [node02]

TASK [openshift_node : Create credentials for registry auth] *******************
skipping: [node02]

TASK [openshift_node : Create credentials for registry auth (alternative)] *****
skipping: [node02]

TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
skipping: [node02]

TASK [openshift_node : Check that node image is present] ***********************
changed: [node02]

TASK [openshift_node : Pre-pull node image] ************************************
skipping: [node02]

TASK [openshift_node : Copy node script to the node] ***************************
ok: [node02]

TASK [openshift_node : Install Node service file] ******************************
ok: [node02]

TASK [openshift_node : Ensure old system path is set] **************************
skipping: [node02] => (item=/etc/origin/openvswitch)
skipping: [node02] => (item=/var/lib/kubelet)
skipping: [node02] => (item=/opt/cni/bin)

TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]

TASK [openshift_node : Copy node container image to ostree storage] ************
skipping: [node02]

TASK [openshift_node : Install or Update node system container] ****************
skipping: [node02]

TASK [openshift_node : Restart network manager to ensure networking configuration is in place] ***
skipping: [node02]

TASK [openshift_node : Configure Node settings] ********************************
ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='})
ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'})
ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'})

TASK [openshift_node : Configure Proxy Settings] *******************************
skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='})
skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='})
skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'})

TASK [openshift_node : file] ***************************************************
skipping: [node02]

TASK [openshift_node : Create the Node config] *********************************
changed: [node02]

TASK [openshift_node : Configure Node Environment Variables] *******************

TASK [openshift_node : Ensure the node static pod directory exists] ************
changed: [node02]

TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]

TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]

TASK [openshift_node : Install NFS storage plugin dependencies] ****************
ok: [node02]

TASK [openshift_node : Check for existence of nfs sebooleans] ******************
ok: [node02] => (item=virt_use_nfs)
ok: [node02] => (item=virt_sandbox_use_nfs)

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:16.984023', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.010625', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:16.973398', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:18.471882', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.014231', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:18.457651', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:16.984023', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.010625', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:16.973398', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:18.471882', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.014231', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:18.457651', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Install GlusterFS storage plugin dependencies] **********
ok: [node02]

TASK [openshift_node : Check for existence of fusefs sebooleans] ***************
ok: [node02] => (item=virt_use_fusefs)
ok: [node02] => (item=virt_sandbox_use_fusefs)

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:26.219010', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.012225', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:26.206785', '_ansible_ignore_errors': None, 'failed': False})
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:27.649599', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.010497', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:27.639102', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:26.219010', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.012225', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:26.206785', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-25 08:02:27.649599', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.010497', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-25 08:02:27.639102', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Install Ceph storage plugin dependencies] ***************
ok: [node02]

TASK [openshift_node : Install iSCSI storage plugin dependencies] **************
ok: [node02] => (item=iscsi-initiator-utils)
ok: [node02] => (item=device-mapper-multipath)

TASK [openshift_node : restart services] ***************************************
ok: [node02] => (item=multipathd)
ok: [node02] => (item=rpcbind)
ok: [node02] => (item=iscsid)

TASK [openshift_node : Template multipath configuration] ***********************
changed: [node02]

TASK [openshift_node : Enable and start multipath] *****************************
changed: [node02]

TASK [tuned : Check for tuned package] *****************************************
ok: [node02]

TASK [tuned : Set tuned OpenShift variables] ***********************************
ok: [node02]

TASK [tuned : Ensure directory structure exists] *******************************
ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183})
ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183})
ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183})
skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})
skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})
skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})
skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})

TASK [tuned : Ensure files are populated from templates] ***********************
skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183})
skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183})
skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183})
ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})
ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})
ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})
ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'})

TASK [tuned : Make tuned use the
recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node02] changed: [node01] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the 
bootstrap hostnames] ********************************************
ok: [node01] => {
    "msg": [
        "node02",
        "node01"
    ]
}

TASK [Approve bootstrap nodes] *************************************************
changed: [node01]

TASK [Get CSRs] ****************************************************************
skipping: [node01]

TASK [Report approval errors] **************************************************
skipping: [node01]

PLAY [Ensure any inventory labels are applied to the nodes] ********************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]

TASK [openshift_manage_node : Wait for master API to become available before proceeding] ***
skipping: [node02]

TASK [openshift_manage_node : Wait for Node Registration] **********************
ok: [node01 -> node01]
ok: [node02 -> node01]

TASK [openshift_manage_node : include_tasks] ***********************************
included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01

TASK [openshift_manage_node : Set node schedulability] *************************
ok: [node02 -> node01]
ok: [node01 -> node01]

TASK [openshift_manage_node : include_tasks] ***********************************
included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01

TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] ***
ok: [node02 -> node01]

TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] ***

TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] ***
ok: [node02 -> node01]

TASK [openshift_manage_node : label non-master non-infra nodes compute] ********

TASK [openshift_manage_node : Label all-in-one master as a compute node] *******
skipping: [node02]

PLAY RECAP *********************************************************************
localhost                  : ok=30   changed=0    unreachable=0    failed=0
node01                     : ok=68   changed=3    unreachable=0    failed=0
node02                     : ok=158  changed=33   unreachable=0    failed=0

INSTALLER STATUS ***************************************************************
Initialization              : Complete (0:03:37)
Node Preparation            : Complete (0:04:59)

Sending file modes: C0755 110489328 oc
Sending file modes: C0600 5649 admin.kubeconfig
Cluster "node01:8443" set.
Cluster "node01:8443" set.
+ set +e
+ kubectl get nodes --no-headers
+ cluster/kubectl.sh get nodes --no-headers
node01   Ready   compute,infra,master   17d   v1.10.0+b81c8f8
node02   Ready   compute                59s   v1.10.0+b81c8f8
+ kubectl_rc=0
+ '[' 0 -ne 0 ']'
++ kubectl get nodes --no-headers
++ cluster/kubectl.sh get nodes --no-headers
++ grep NotReady
+ '[' -n '' ']'
+ set -e
+ echo 'Nodes are ready:'
Nodes are ready:
+ kubectl get nodes
+ cluster/kubectl.sh get nodes
NAME      STATUS    ROLES                  AGE       VERSION
node01    Ready     compute,infra,master   17d       v1.10.0+b81c8f8
node02    Ready     compute                1m        v1.10.0+b81c8f8
+ make cluster-sync
./cluster/build.sh
Building ...
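
The scale-up above ends with a readiness gate before make cluster-sync is started: the script lists nodes without headers, captures the kubectl exit code under set +e, and greps for NotReady; only an empty grep result lets it proceed. A minimal standalone sketch of that pattern, with plain kubectl substituted for the cluster/kubectl.sh wrapper used in this job:

  #!/bin/bash
  set -e
  set +e                          # tolerate a failing kubectl call so its rc can be inspected
  kubectl get nodes --no-headers
  kubectl_rc=$?
  set -e
  if [ "$kubectl_rc" -ne 0 ]; then
      echo "could not list nodes" >&2
      exit "$kubectl_rc"
  fi
  # grep exits non-zero when nothing matches, so guard it under set -e
  not_ready=$(kubectl get nodes --no-headers | grep NotReady || true)
  if [ -n "$not_ready" ]; then
      echo "some nodes are NotReady:" >&2
      echo "$not_ready" >&2
      exit 1
  fi
  echo 'Nodes are ready:'
  kubectl get nodes
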
Untagged: localhost:33487/kubevirt/virt-controller:devel Untagged: localhost:33487/kubevirt/virt-controller@sha256:bc24296e9023dcb916dfc9dcd20dadc3155042875006cf76c21ca0ff9835731a Deleted: sha256:44beba76eafb2e7b04232ad8a3ae77fd3ba431cdcd36ffea5f36a6709d6b6f56 Deleted: sha256:304974de84fda3519cab0e76971ad6fa5ee19183fc79beabb6206af4f70a998b Deleted: sha256:d0dc28451e5bb3aa764f07c6f9fcbb8e07d85b3ede15453dde2ebdaf2bdc3b4c Deleted: sha256:94894adf23c429289b245313e28f7fdd31441289d658993e33563687b4755ad7 Untagged: localhost:33487/kubevirt/virt-launcher:devel Untagged: localhost:33487/kubevirt/virt-launcher@sha256:bee621850e165128740b2e174beb0e84659afe36ac1ba7adcfff5d8f0bca7e46 Deleted: sha256:68618c665c91ddbfb8e6440062e8e8d0de68df4fb55da57b491854914860a5b5 Deleted: sha256:8fe492d48ccb672a22d4b81e7e87863a221843999dc39a2a6610f9d5f9c46da1 Deleted: sha256:18d25db21aa6af69fcee7d3f67d7bb42cb6020dc4b0be6da34190be167b64077 Deleted: sha256:83b223c32f095227cc9eaca05817b87575e8dd08e320fa50b9fd189bb1c735e4 Deleted: sha256:78a430b58827ab110167f5807beff35ca9caeefae072bc04cfcd6d54b1d06f4d Deleted: sha256:733c4b928bc821718a868e6f8a4a62749f485ec9830ec636f2e670cd043e815b Deleted: sha256:0fa34f89159a1c73e7e50873f54ddc9204f22ef11b292bcd19d40e9c44978ba7 Deleted: sha256:254f4972bb20a75e2d8d2c5668d1191b82c259948eeaee62ba10e5a1e6d821f3 Deleted: sha256:0fc3e3c82d5b29ed50ba07496a725cd736da043c80ad14e4870f163b5986829a Deleted: sha256:3df06a9b5508394446b171926bc6f3410f29f77221d10aab90a6e49525a7cd34 Deleted: sha256:9445842678dd191841635aeb724bf38c85b780744fc1c409107616c1c8a14af5 Deleted: sha256:b0e729127b38203a74c1aa24f0b4b1d433ffef1da04c335fe6b196e3c5a80ea7 Untagged: localhost:33487/kubevirt/virt-handler:devel Untagged: localhost:33487/kubevirt/virt-handler@sha256:dea05ae2540240337cfffd9f18d94be3d557ee720fe5773256a3fcb9dcd3e647 Deleted: sha256:50064eec44ef2d7cf7b607f050a5f90cb0b9fafaff5b4ddff50f373333ddde8f Deleted: sha256:a94824655d5e43782e194dda5009e1f9d67616d8ab9dd434a36bf080841c6841 Deleted: sha256:679d6d32e9823572827983bd825727e9727aa1a47b37745a6fa936a1def6aedf Deleted: sha256:3ac09f1a902c389d3ae20ffa528c00911417c65737176c921a983fccd3aa18af Untagged: localhost:33487/kubevirt/virt-api:devel Untagged: localhost:33487/kubevirt/virt-api@sha256:f2a04da3bea64baac7331664d611c974f0087334531d37a2e47303a9733836f5 Deleted: sha256:f1a92fe08c0cfccf28b49b2e1bc88a58f10feeeeb5053f79ca94d0c69a609369 Deleted: sha256:22823a60a4c2c2b1578abef84205b2648236b7b21c8908c74903ef03587233d5 Deleted: sha256:4b9f28ffaf41ddddb921a17c25c021ae9f458fc359de63dfb00005617c334d57 Deleted: sha256:1fc4dab1291e0a4d1370d31f4d32756bb0216ae4c25e1a4f3028222bc4763343 Untagged: localhost:33487/kubevirt/subresource-access-test:devel Untagged: localhost:33487/kubevirt/subresource-access-test@sha256:12c433df63e9523f79485db5eae7e46b5b6062242ef61b16240f0aa6608ada77 Deleted: sha256:474b167a437cd61848d041f3b3b81b312b3de636fd028e28e6e888fe8d4f0365 Deleted: sha256:e7b110b5519a076beae96679750111a4dcec1a19ff73abdad318a5627ab82569 Deleted: sha256:1e3bcfb900b306dafd9fbfa710754e11a46c640922f4c6ffdfaba6c597c0e272 Deleted: sha256:19eb4b870e7619f8fc0e73ea70d67088eaa000d660221c4a869b8c4216893d13 Untagged: localhost:33479/kubevirt/example-hook-sidecar:devel Untagged: localhost:33479/kubevirt/example-hook-sidecar@sha256:90aba567ea0b24a84a8ab3be10353555509e3558d0cdf800876fa8d4d827873e Deleted: sha256:a1aad47f92e7adb8a44cc640523f6f1b0e0ceb479e3825677189644cbadacd80 Deleted: sha256:c8db78d0f7efc8000197839e37d4b600144934b111b02ec5a025464031cccf3c Deleted: 
sha256:73b05f7e25edfb27b6ca5b133cfaae0adfa6227c1e8ca64c96d32ff08cd6f157 Deleted: sha256:b7a66fe1c146a9440d89cb00766c5bddb395d7b96458460a33057d18b5e1ba81 sha256:8314c812ee3200233db076e79036b39759d30fc3dd7fe921b323b19b14306fd6 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:8314c812ee3200233db076e79036b39759d30fc3dd7fe921b323b19b14306fd6 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.35 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2405aa62579a Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> 1ac62e99a9e7 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> c7b69424a0c5 Step 5/8 : USER 1001 ---> Using cache ---> e60ed5d8e78a Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> 1531a9055c6e Removing intermediate container edeabac4b39a Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in 1b454e4f3c79 ---> 7b6a6979dbcd Removing intermediate container 1b454e4f3c79 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-controller" '' ---> Running in dbe2f33c00bf ---> cc520699a27e Removing intermediate container dbe2f33c00bf Successfully built cc520699a27e Sending build context to Docker daemon 42.63 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 65f548d54a2e Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 04ae26de19c4 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> 05a881eae374 Removing intermediate container 1e260a4d89ed Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> 249fc8ec5705 Removing intermediate container dcf1c2ca3886 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in 60901cfae2ae  ---> a5251599969d Removing intermediate container 60901cfae2ae Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in ec2f30101d1f  ---> 65ad2d54b39a Removing intermediate container ec2f30101d1f Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> 4aed95cf3506 Removing intermediate container d5526a364704 Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Running in 87a13845e749 ---> fb1728b906ef Removing intermediate container 87a13845e749 Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-launcher" '' ---> Running in dabb85200e20 ---> d1a7935f3efa Removing intermediate container dabb85200e20 Successfully built d1a7935f3efa Sending build context to Docker daemon 41.65 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2405aa62579a Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> 4140859c5e6f Removing intermediate container 97df2c29f461 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Running in 
c45ae4a97e59 ---> 5d7c5217979a Removing intermediate container c45ae4a97e59 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-handler" '' ---> Running in b26ec5358593 ---> d2e16df7f4ab Removing intermediate container b26ec5358593 Successfully built d2e16df7f4ab Sending build context to Docker daemon 38.75 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2405aa62579a Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> 830d77e8a3bb Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 7075b0c3cdfd Step 5/8 : USER 1001 ---> Using cache ---> 4e21374fdc1d Step 6/8 : COPY virt-api /usr/bin/virt-api ---> 0ae8e117c190 Removing intermediate container 552a9bf5dc37 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in 466db0d837ec ---> 7301e8c41f98 Removing intermediate container 466db0d837ec Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-api" '' ---> Running in b5c426f62071 ---> 108e5d1e78d4 Removing intermediate container b5c426f62071 Successfully built 108e5d1e78d4 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2405aa62579a Step 3/7 : ENV container docker ---> Using cache ---> 3370e25ee81a Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 3f571283fdaa Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> 2722b024d103 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 8458081a089b Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> e81ad9c5b2e7 Successfully built e81ad9c5b2e7 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2405aa62579a Step 3/5 : ENV container docker ---> Using cache ---> 3370e25ee81a Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 006e94a74def Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "vm-killer" '' ---> Using cache ---> 01fc8a132f99 Successfully built 01fc8a132f99 Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 496290160351 Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 081acc82039b Step 3/7 : ENV container docker ---> Using cache ---> 87a43203841c Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> bbc83781e0a9 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> c588d7a778a6 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> e28b44b64988 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "registry-disk-v1alpha" '' ---> Using cache ---> e11bb84f4d97 Successfully built e11bb84f4d97 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33567/kubevirt/registry-disk-v1alpha:devel ---> e11bb84f4d97 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 203987dfb356 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> e89a7b9e613a Step 4/4 : LABEL "cirros-registry-disk-demo" '' 
"kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> d8b4b036c834 Successfully built d8b4b036c834 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33567/kubevirt/registry-disk-v1alpha:devel ---> e11bb84f4d97 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 203b9a9a9b6f Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> 5c3ab03ad56f Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> f16803ed7c89 Successfully built f16803ed7c89 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33567/kubevirt/registry-disk-v1alpha:devel ---> e11bb84f4d97 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 203b9a9a9b6f Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 94806a94f275 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> d9a071ce84a3 Successfully built d9a071ce84a3 Sending build context to Docker daemon 35.56 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2405aa62579a Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 939ec18dc9a4 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 52b6bf037d32 Step 5/8 : USER 1001 ---> Using cache ---> 1e1560e0af32 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> 00eba70b0f56 Removing intermediate container d79f53372fb0 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in 0db9e2cf7627 ---> 1da47be25c5a Removing intermediate container 0db9e2cf7627 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "subresource-access-test" '' ---> Running in 49dd28778070 ---> 699e816daf78 Removing intermediate container 49dd28778070 Successfully built 699e816daf78 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 2405aa62579a Step 3/9 : ENV container docker ---> Using cache ---> 3370e25ee81a Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> 3129352c97b1 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> fbcd5a15f974 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 6e560dc836a0 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 8a916bbc2352 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 72d00ac082db Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "winrmcli" '' ---> Using cache ---> b8dc9e736e1e Successfully built b8dc9e736e1e Sending build context to Docker daemon 36.77 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> cdc31216ee0b Removing intermediate container da199ab45e11 Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in 48aa94f23dd8 ---> 8810357474cf Removing intermediate container 48aa94f23dd8 
Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in ca5bde6b8d90 ---> 9e1bb0c29511 Removing intermediate container ca5bde6b8d90 Successfully built 9e1bb0c29511 hack/build-docker.sh push The push refers to a repository [localhost:33567/kubevirt/virt-controller] 15fb52237589: Preparing d07058c760ad: Preparing 891e1e4ef82a: Preparing d07058c760ad: Pushed 15fb52237589: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:44f0c3dc51a603364ade35be98c472f6c2637c7c093e7d3eb6d3cf14bee38c10 size: 949 The push refers to a repository [localhost:33567/kubevirt/virt-launcher] cb82e0f62ba1: Preparing 99aa745e896e: Preparing 7f1c622839e1: Preparing 30a1b82cb685: Preparing 14a25b061e96: Preparing 53f12636d41e: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 5eefb9960a36: Preparing 891e1e4ef82a: Preparing da38cf808aa5: Waiting 186d8b3e4fd8: Waiting 5eefb9960a36: Waiting fa6154170bf5: Waiting 891e1e4ef82a: Waiting 99aa745e896e: Pushed 30a1b82cb685: Pushed cb82e0f62ba1: Pushed b83399358a92: Pushed da38cf808aa5: Pushed 186d8b3e4fd8: Pushed fa6154170bf5: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 7f1c622839e1: Pushed 53f12636d41e: Pushed 14a25b061e96: Pushed 5eefb9960a36: Pushed devel: digest: sha256:69c0d3f5b1ee7a49292af099c9819d02d85b35a326ead2ed1c26f17d97592613 size: 2828 The push refers to a repository [localhost:33567/kubevirt/virt-handler] 036711fdb3d4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher 036711fdb3d4: Pushed devel: digest: sha256:040197ccaddc52b6f0ecd810050a64f64e400661d5c5703303161625b3e56116 size: 741 The push refers to a repository [localhost:33567/kubevirt/virt-api] 0e7eb0d26c47: Preparing 25755ffecaf3: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 25755ffecaf3: Pushed 0e7eb0d26c47: Pushed devel: digest: sha256:779edd1db574184c32b25a05aeb66ed282d254828a95faf1a6be6b8b39586ae8 size: 948 The push refers to a repository [localhost:33567/kubevirt/disks-images-provider] 5ffe52947a94: Preparing a1bc751fc8a2: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 5ffe52947a94: Pushed a1bc751fc8a2: Pushed devel: digest: sha256:06c29a53ec7698ae500eb7d91a90cd7dce1b6297ef96b96fbe4602f54552049c size: 948 The push refers to a repository [localhost:33567/kubevirt/vm-killer] 3a82b543c335: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider 3a82b543c335: Pushed devel: digest: sha256:70d5924a604e2730aa003adf161b8173746d0f85e98994a65f3b256f513ac559 size: 740 The push refers to a repository [localhost:33567/kubevirt/registry-disk-v1alpha] cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing cb3d1019d03e: Pushed 626899eeec02: Pushed 132d61a890c5: Pushed devel: digest: sha256:72fd0c47c0c44c7977693a12cc05d64421f46faa0c1aba8b8056baf15d72f83f size: 948 The push refers to a repository [localhost:33567/kubevirt/cirros-registry-disk-demo] d06cf28520bd: Preparing cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 132d61a890c5: Mounted from kubevirt/registry-disk-v1alpha cb3d1019d03e: Mounted from kubevirt/registry-disk-v1alpha 626899eeec02: Mounted from kubevirt/registry-disk-v1alpha d06cf28520bd: Pushed devel: digest: sha256:43c576c72c81f5011589375db8d7bb158f25124bd6c537b981b36a4502fb8b36 size: 1160 The push refers to a repository [localhost:33567/kubevirt/fedora-cloud-registry-disk-demo] ebb5671f5e29: Preparing 
cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 626899eeec02: Mounted from kubevirt/cirros-registry-disk-demo 132d61a890c5: Mounted from kubevirt/cirros-registry-disk-demo cb3d1019d03e: Mounted from kubevirt/cirros-registry-disk-demo ebb5671f5e29: Pushed devel: digest: sha256:fb088d5bfda6184fc3900f0386f933f62443246dc3b43949ba6f1654ad2fe700 size: 1161 The push refers to a repository [localhost:33567/kubevirt/alpine-registry-disk-demo] f266c6313b5c: Preparing cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 132d61a890c5: Mounted from kubevirt/fedora-cloud-registry-disk-demo 626899eeec02: Mounted from kubevirt/fedora-cloud-registry-disk-demo cb3d1019d03e: Mounted from kubevirt/fedora-cloud-registry-disk-demo f266c6313b5c: Pushed devel: digest: sha256:3678d344eb613bcd9c858ae918591c50bbdb7bb86a94ff84e8c4ca1a4ad9081a size: 1160 The push refers to a repository [localhost:33567/kubevirt/subresource-access-test] 00366115ec3d: Preparing 5c35b999e0e4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 5c35b999e0e4: Pushed 00366115ec3d: Pushed devel: digest: sha256:0a313285c0ed98aed1a59202a252931cf231054dd82e5dc2882dd7c4d2621e6f size: 948 The push refers to a repository [localhost:33567/kubevirt/winrmcli] d8f4160f7568: Preparing b34315236250: Preparing b4a3c0429828: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test d8f4160f7568: Pushed b4a3c0429828: Pushed b34315236250: Pushed devel: digest: sha256:04d386314e1d8b78994f96b9f885b5c4f9faaeefcc2283d572ef9530eb92c472 size: 1165 The push refers to a repository [localhost:33567/kubevirt/example-hook-sidecar] bdc122c0f882: Preparing 39bae602f753: Preparing bdc122c0f882: Pushed 39bae602f753: Pushed devel: digest: sha256:9ea59d1bd29a02c36bfcebe529c0af9d8bd69e07cdd6b2e9c20272d4df85b457 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ 
kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-109-gf89d23a ++ KUBEVIRT_VERSION=v0.7.0-109-gf89d23a + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33567/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... 
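
The clean-up trace that follows is label-driven: rather than deleting objects by name, ./cluster/clean.sh walks each namespace and deletes every KubeVirt-owned resource type by the kubevirt.io label, so the script keeps working as components change. A condensed sketch of that pattern (the resource list is abridged here; the real run also covers apiservices, validatingwebhookconfigurations and CRDs, as seen below):

  #!/bin/bash
  set -e
  export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
  namespaces=(default kube-system)
  # Abridged resource list; kubectl delete with a label selector that matches
  # nothing prints "No resources found" and exits 0, so set -e is safe here.
  resources=(deployment rs services secrets pv pvc ds pods rolebinding roles serviceaccounts)
  for ns in "${namespaces[@]}"; do
      for res in "${resources[@]}"; do
          kubectl -n "$ns" delete "$res" -l kubevirt.io
      done
  done
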
+ cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete 
clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n 
kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ 
APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-109-gf89d23a ++ KUBEVIRT_VERSION=v0.7.0-109-gf89d23a + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33567/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... 
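
The deploy step below iterates over the generated release manifests, skips anything whose path matches demo, creates the rest, and then, because the provider is OpenShift, grants the privileged SCC to each KubeVirt service account. A minimal sketch, assuming MANIFESTS_OUT_DIR points at the same _out/manifests layout shown in the trace:

  #!/bin/bash
  set -e
  MANIFESTS_OUT_DIR=_out/manifests    # assumed relative path; the job uses an absolute workspace path
  for manifest in "${MANIFESTS_OUT_DIR}"/release/*; do
      # demo content (e.g. demo-content.yaml) is not deployed in release runs
      [[ $manifest =~ .*demo.* ]] && continue
      kubectl create -f "$manifest"
  done
  kubectl create -f "${MANIFESTS_OUT_DIR}/testing" -R    # -R recurses into the testing manifests
  # OpenShift-only: the virt components need the privileged SCC to run
  for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
      oc adm policy add-scc-to-user privileged -z "$sa" -n kube-system
  done
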
+ [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n 
kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-jt7zs 0/1 ContainerCreating 0 5s disks-images-provider-jz9rx 0/1 ContainerCreating 0 4s virt-api-7d79764579-qxdsk 0/1 ContainerCreating 0 7s virt-api-7d79764579-wfvcn 0/1 ContainerCreating 0 7s virt-controller-7d57d96b65-h72d2 0/1 ContainerCreating 0 7s virt-handler-cqcks 0/1 ContainerCreating 0 7s virt-handler-swxsf 0/1 ContainerCreating 0 7s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... + kubectl get pods -n kube-system --no-headers + cluster/kubectl.sh get pods -n kube-system --no-headers + grep -v Running disks-images-provider-jt7zs 0/1 ContainerCreating 0 6s disks-images-provider-jz9rx 0/1 ContainerCreating 0 5s virt-api-7d79764579-qxdsk 0/1 ContainerCreating 0 8s virt-api-7d79764579-wfvcn 0/1 ContainerCreating 0 8s virt-controller-7d57d96b65-h72d2 0/1 ContainerCreating 0 8s virt-handler-cqcks 0/1 ContainerCreating 0 8s virt-handler-swxsf 0/1 ContainerCreating 0 8s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false + '[' -n false ']' + echo 'Waiting for KubeVirt containers to become ready ...' Waiting for KubeVirt containers to become ready ... 
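The deployment steps above follow a small set of helpers; a sketch of what the trace implies (the function name, demo-skip loop, and SCC grants are taken from the trace, the rest is illustrative):

# _kubectl: pin KUBECONFIG to the ephemeral cluster and call its kubectl
_kubectl() {
    export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
    cluster/os-3.10.0/.kubectl "$@"
}

# apply every release manifest except the demo content
for manifest in "${MANIFESTS_OUT_DIR}"/release/*; do
    [[ ${manifest} =~ .*demo.* ]] && continue
    _kubectl create -f "${manifest}"
done
_kubectl create -f "${MANIFESTS_OUT_DIR}/testing" -R

# OpenShift providers additionally need the privileged SCC on the
# KubeVirt service accounts before the pods may start
if [[ ${KUBEVIRT_PROVIDER} =~ os-.* ]]; then
    for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
        _kubectl adm policy add-scc-to-user privileged -z "${sa}" -n kube-system
    done
fi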
+ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + grep false + cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers false + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n kube-system + cluster/kubectl.sh get pods -n kube-system NAME READY STATUS RESTARTS AGE disks-images-provider-jt7zs 1/1 Running 0 1m disks-images-provider-jz9rx 1/1 Running 0 1m master-api-node01 1/1 Running 1 17d master-controllers-node01 1/1 Running 1 17d master-etcd-node01 1/1 Running 1 17d virt-api-7d79764579-qxdsk 1/1 Running 0 1m virt-api-7d79764579-wfvcn 1/1 Running 0 1m virt-controller-7d57d96b65-blgfz 1/1 Running 0 1m virt-controller-7d57d96b65-h72d2 1/1 Running 0 1m virt-handler-cqcks 1/1 Running 0 1m virt-handler-swxsf 1/1 Running 0 1m + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n default --no-headers ++ cluster/kubectl.sh get pods -n default --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n default + cluster/kubectl.sh get pods -n default NAME READY STATUS RESTARTS AGE docker-registry-1-rl562 1/1 Running 1 17d registry-console-1-rw9zf 1/1 Running 1 17d router-1-6cch9 1/1 Running 1 17d + kubectl version + cluster/kubectl.sh version oc v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 features: Basic-Auth GSSAPI Kerberos SPNEGO Server https://127.0.0.1:33564 openshift v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 + ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + [[ openshift-3.10-release =~ windows.* ]] + FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + make functest hack/dockerized "hack/build-func-tests.sh" sha256:8314c812ee3200233db076e79036b39759d30fc3dd7fe921b323b19b14306fd6 go version go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... 
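The wait that just completed is a two-phase poll, 30-second samples against a 300-second budget: first every pod must leave non-Running states, then every container must report ready. A sketch, assuming plain kubectl stands in for cluster/kubectl.sh:

timeout=300; sample=30
for ns in kube-system default; do
    # phase 1: no pod may be in a non-Running state
    current_time=0
    while [ -n "$(kubectl get pods -n "$ns" --no-headers | grep -v Running)" ]; do
        echo 'Waiting for kubevirt pods to enter the Running state ...'
        sleep "$sample"; current_time=$((current_time + sample))
        [ "$current_time" -gt "$timeout" ] && exit 1
    done
    # phase 2: no container may report ready=false
    current_time=0
    while kubectl get pods -n "$ns" \
            '-ocustom-columns=status:status.containerStatuses[*].ready' \
            --no-headers | grep -q false; do
        echo 'Waiting for KubeVirt containers to become ready ...'
        sleep "$sample"; current_time=$((current_time + sample))
        [ "$current_time" -gt "$timeout" ] && exit 1
    done
    kubectl get pods -n "$ns"
done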
compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1532506838 Will run 145 of 145 specs • [SLOW TEST:48.623 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:34.737 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:116.934 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:110.065 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:44.155 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113 should create a writeable emptyDisk with the right capacity /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115 ------------------------------ • [SLOW TEST:45.494 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined and a specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163 should create a writeable emptyDisk with the specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165 ------------------------------ • [SLOW TEST:35.534 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should be successfully started 
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207 ------------------------------ Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:29:07 http: TLS handshake error from 10.129.0.1:43150: EOF 2018/07/25 08:29:17 http: TLS handshake error from 10.129.0.1:43156: EOF 2018/07/25 08:29:27 http: TLS handshake error from 10.129.0.1:43162: EOF 2018/07/25 08:29:37 http: TLS handshake error from 10.129.0.1:43170: EOF 2018/07/25 08:29:47 http: TLS handshake error from 10.129.0.1:43178: EOF 2018/07/25 08:29:57 http: TLS handshake error from 10.129.0.1:43186: EOF 2018/07/25 08:30:07 http: TLS handshake error from 10.129.0.1:43194: EOF level=info timestamp=2018-07-25T08:30:09.414066Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:30:17 http: TLS handshake error from 10.129.0.1:43202: EOF 2018/07/25 08:30:27 http: TLS handshake error from 10.129.0.1:43210: EOF 2018/07/25 08:30:37 http: TLS handshake error from 10.129.0.1:43218: EOF level=info timestamp=2018-07-25T08:30:39.390079Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:30:47 http: TLS handshake error from 10.129.0.1:43226: EOF 2018/07/25 08:30:57 http: TLS handshake error from 10.129.0.1:43234: EOF 2018/07/25 08:31:07 http: TLS handshake error from 10.129.0.1:43242: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running level=info timestamp=2018-07-25T08:29:09.199051Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:29:15 http: TLS handshake error from 10.129.0.1:42822: EOF 2018/07/25 08:29:25 http: TLS handshake error from 10.129.0.1:42828: EOF 2018/07/25 08:29:35 http: TLS handshake error from 10.129.0.1:42836: EOF level=info timestamp=2018-07-25T08:29:39.425259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:29:45 http: TLS handshake error from 10.129.0.1:42844: EOF 2018/07/25 08:29:55 http: TLS handshake error from 10.129.0.1:42852: EOF 2018/07/25 08:30:05 http: TLS handshake error from 10.129.0.1:42860: EOF 2018/07/25 08:30:15 http: TLS handshake error from 10.129.0.1:42868: EOF 2018/07/25 08:30:25 http: TLS handshake error from 10.129.0.1:42876: EOF 2018/07/25 08:30:35 http: TLS handshake error from 10.129.0.1:42884: EOF 2018/07/25 08:30:45 http: TLS handshake error from 10.129.0.1:42892: EOF 2018/07/25 08:30:55 http: TLS handshake error from 10.129.0.1:42900: EOF 2018/07/25 08:31:05 http: TLS handshake error from 10.129.0.1:42908: EOF level=info timestamp=2018-07-25T08:31:09.395427Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:19:13.361649Z pos=application.go:174 component=virt-controller service=http action=listening 
interface=0.0.0.0 port=8182 level=info timestamp=2018-07-25T08:28:37.949874Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-25T08:28:37.955679Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-25T08:28:37.972650Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-25T08:28:37.972737Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-25T08:28:37.972782Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-25T08:28:37.972824Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-25T08:28:37.972896Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-25T08:28:37.973257Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-25T08:28:37.984055Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-25T08:28:37.985259Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-25T08:28:37.994252Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-25T08:28:37.990335Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:29:13.381125Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-07-25T08:29:13.388141Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-25T08:29:13.389040Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:29:13.507614Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:29:13.556007Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-25T08:29:13.575635Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:29:13.793053Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind=VirtualMachineInstance uid=a1c3c022-8fe4-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:29:13.834583Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind=VirtualMachineInstance uid=a1c3c022-8fe4-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." 
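Each failed spec dumps a snapshot of every KubeVirt pod's recent log, as seen above and repeated after later failures; the same snapshot can be taken by hand with something along these lines (pod-name patterns read from the listing, tail length assumed):

for p in $(kubectl get pods -n kube-system -o name \
           | grep -E 'virt-(api|controller|handler)|disks-images-provider'); do
    echo "Pod name: ${p#pod/}"
    kubectl logs -n kube-system "$p" --tail=15   # last ~15 lines, as in the dump
done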
level=info timestamp=2018-07-25T08:29:13.933573Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind= uid=a1c3c022-8fe4-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:29:13.934201Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind= uid=a1c3c022-8fe4-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmi8gwps-fssv8 Pod phase: Running level=info timestamp=2018-07-25T08:29:05.508892Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-25T08:29:05.511145Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-25T08:29:05.519680Z pos=libvirt.go:256 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-25T08:29:15.536974Z pos=libvirt.go:271 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-25T08:29:15.625079Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi8gwps" level=info timestamp=2018-07-25T08:29:15.628018Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-25T08:29:15.629235Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [198.191 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should not persist data [It] /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218 Expected error: : 180000000000 expect: timer expired after 180 seconds not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:226 ------------------------------ STEP: Starting the VirtualMachineInstance STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-25T08:27:57.348678Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmi8gwps-fssv8" level=info timestamp=2018-07-25T08:28:12.096986Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmi8gwps-fssv8" level=info timestamp=2018-07-25T08:28:13.998662Z pos=utils.go:243 component=tests msg="VirtualMachineInstance defined." 
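"expect: timer expired after 180 seconds" above means the test drove the VMI's serial console but never saw the expected prompt within three minutes. A hypothetical manual reproduction while the VMI is still around, assuming a built virtctl and that the CRD registers the vmi short name:

kubectl get vmi --namespace kubevirt-test-default
virtctl console testvmi8gwps --namespace kubevirt-test-default    # interactive serial console
kubectl logs --namespace kubevirt-test-default virt-launcher-testvmi8gwps-fssv8 | tail -n 20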
level=info timestamp=2018-07-25T08:28:14.046533Z pos=utils.go:243 component=tests msg="VirtualMachineInstance started." STEP: Writing an arbitrary file to it's EFI partition level=info timestamp=2018-07-25T08:31:14.254273Z pos=utils.go:1257 component=tests namespace=kubevirt-test-default name=testvmi8gwps kind=VirtualMachineInstance uid= msg="Login: [{2 \r\n\r\n\r\nISOLINUX 6.04 6.04-pre1 Copyright (C) 1994-2015 H. Peter Anvin et al\r\nboot: \u001b[?7h\r\n []}]" Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:31:27 http: TLS handshake error from 10.129.0.1:43258: EOF 2018/07/25 08:31:37 http: TLS handshake error from 10.129.0.1:43266: EOF 2018/07/25 08:31:47 http: TLS handshake error from 10.129.0.1:43274: EOF 2018/07/25 08:31:57 http: TLS handshake error from 10.129.0.1:43284: EOF 2018/07/25 08:32:07 http: TLS handshake error from 10.129.0.1:43292: EOF level=info timestamp=2018-07-25T08:32:09.234209Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:32:17 http: TLS handshake error from 10.129.0.1:43300: EOF 2018/07/25 08:32:27 http: TLS handshake error from 10.129.0.1:43308: EOF 2018/07/25 08:32:37 http: TLS handshake error from 10.129.0.1:43316: EOF level=info timestamp=2018-07-25T08:32:39.189539Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:32:47 http: TLS handshake error from 10.129.0.1:43324: EOF 2018/07/25 08:32:57 http: TLS handshake error from 10.129.0.1:43332: EOF 2018/07/25 08:33:07 http: TLS handshake error from 10.129.0.1:43340: EOF level=info timestamp=2018-07-25T08:33:09.295441Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:33:17 http: TLS handshake error from 10.129.0.1:43348: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running level=info timestamp=2018-07-25T08:31:09.395427Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:31:15 http: TLS handshake error from 10.129.0.1:42916: EOF 2018/07/25 08:31:25 http: TLS handshake error from 10.129.0.1:42924: EOF 2018/07/25 08:31:35 http: TLS handshake error from 10.129.0.1:42932: EOF level=info timestamp=2018-07-25T08:31:39.201697Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:31:45 http: TLS handshake error from 10.129.0.1:42940: EOF 2018/07/25 08:31:55 http: TLS handshake error from 10.129.0.1:42948: EOF 2018/07/25 08:32:05 http: TLS handshake error from 10.129.0.1:42958: EOF 2018/07/25 08:32:15 http: TLS handshake error from 10.129.0.1:42966: EOF 2018/07/25 08:32:25 http: TLS handshake error from 10.129.0.1:42974: EOF 2018/07/25 08:32:35 http: TLS handshake error from 10.129.0.1:42982: EOF 2018/07/25 08:32:45 http: TLS handshake error from 10.129.0.1:42990: EOF 2018/07/25 08:32:55 http: TLS handshake error from 10.129.0.1:42998: EOF 2018/07/25 08:33:05 http: TLS handshake error from 10.129.0.1:43006: EOF 2018/07/25 08:33:15 http: TLS handshake error from 10.129.0.1:43014: EOF Pod name: 
virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:19:13.361649Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 level=info timestamp=2018-07-25T08:28:37.949874Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-25T08:28:37.955679Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-25T08:28:37.972650Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-25T08:28:37.972737Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-25T08:28:37.972782Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-25T08:28:37.972824Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-25T08:28:37.972896Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-25T08:28:37.973257Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-25T08:28:37.984055Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-25T08:28:37.985259Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-25T08:28:37.994252Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-25T08:28:37.990335Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-25T08:31:44.863791Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:31:44.864203Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:31:14.836294Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind= uid=a1c3c022-8fe4-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:31:14.839680Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind= uid=a1c3c022-8fe4-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:31:15.070390Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-25T08:31:15.070741Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8gwps kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:32:00.986665Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:32:02.418848Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-25T08:32:02.419560Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-25T08:32:03.170689Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:32:03.200645Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-25T08:32:03.224154Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:32:03.225880Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:32:03.284128Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:32:03.322094Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:32:03.326669Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:32:03.349217Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmildfvc-b959v Pod phase: Running level=info timestamp=2018-07-25T08:32:01.419558Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Domain defined." 
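The handler entries above show the expected libvirt lifecycle for testvmildfvc: domain added, Paused (reason StartingUp), then Running. To follow a single VMI through that state machine, filtering the handler logs by name is usually enough (the kubevirt.io=virt-handler label is assumed here):

kubectl logs -n kube-system -l kubevirt.io=virt-handler --tail=-1 \
    | grep testvmildfvc \
    | grep -E 'Paused|Running|shutdown|Synchronization'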
level=info timestamp=2018-07-25T08:32:02.391189Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:32:02.420712Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:32:02.430619Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 35a6e56a-1f1f-4aaa-be87-a64610028bf4" level=info timestamp=2018-07-25T08:32:02.431006Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:32:03.102241Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:32:03.159441Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:32:03.179581Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Domain started." level=info timestamp=2018-07-25T08:32:03.184706Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:32:03.205989Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:32:03.206349Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:32:03.280502Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:32:03.291302Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:32:03.338683Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:32:03.436615Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 35a6e56a-1f1f-4aaa-be87-a64610028bf4: 190" • Failure [124.276 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With VirtualMachineInstance with two PVCs /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266 should start vmi multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278 Timed out after 123.057s. 
Expected error: <*errors.StatusError | 0xc420152510>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64 ------------------------------ STEP: Starting and stopping the VirtualMachineInstance number of times STEP: Starting a VirtualMachineInstance Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:31:57 http: TLS handshake error from 10.129.0.1:43284: EOF 2018/07/25 08:32:07 http: TLS handshake error from 10.129.0.1:43292: EOF level=info timestamp=2018-07-25T08:32:09.234209Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:32:17 http: TLS handshake error from 10.129.0.1:43300: EOF 2018/07/25 08:32:27 http: TLS handshake error from 10.129.0.1:43308: EOF 2018/07/25 08:32:37 http: TLS handshake error from 10.129.0.1:43316: EOF level=info timestamp=2018-07-25T08:32:39.189539Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:32:47 http: TLS handshake error from 10.129.0.1:43324: EOF 2018/07/25 08:32:57 http: TLS handshake error from 10.129.0.1:43332: EOF 2018/07/25 08:33:07 http: TLS handshake error from 10.129.0.1:43340: EOF level=info timestamp=2018-07-25T08:33:09.295441Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:33:17 http: TLS handshake error from 10.129.0.1:43348: EOF 2018/07/25 08:33:27 http: TLS handshake error from 10.129.0.1:43356: EOF 2018/07/25 08:33:37 http: TLS handshake error from 10.129.0.1:43364: EOF 2018/07/25 08:33:47 http: TLS handshake error from 10.129.0.1:43372: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running level=info timestamp=2018-07-25T08:31:39.201697Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:31:45 http: TLS handshake error from 10.129.0.1:42940: EOF 2018/07/25 08:31:55 http: TLS handshake error from 10.129.0.1:42948: EOF 2018/07/25 08:32:05 http: TLS handshake error from 10.129.0.1:42958: EOF 2018/07/25 08:32:15 http: TLS handshake error from 10.129.0.1:42966: EOF 2018/07/25 08:32:25 http: TLS handshake error from 10.129.0.1:42974: EOF 2018/07/25 08:32:35 http: TLS handshake error from 10.129.0.1:42982: EOF 2018/07/25 08:32:45 http: TLS handshake error from 10.129.0.1:42990: EOF 2018/07/25 08:32:55 http: TLS handshake error from 10.129.0.1:42998: EOF 2018/07/25 08:33:05 http: TLS handshake error from 10.129.0.1:43006: EOF 2018/07/25 08:33:15 http: TLS handshake error from 10.129.0.1:43014: EOF 2018/07/25 08:33:25 http: TLS handshake error from 10.129.0.1:43022: EOF 2018/07/25 08:33:35 http: TLS handshake error from 10.129.0.1:43030: EOF level=info timestamp=2018-07-25T08:33:39.466687Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:33:45 http: TLS handshake error from 10.129.0.1:43038: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:28:37.972650Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-25T08:28:37.972737Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-25T08:28:37.972782Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-25T08:28:37.972824Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-25T08:28:37.972896Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-25T08:28:37.973257Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-25T08:28:37.984055Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-25T08:28:37.985259Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-25T08:28:37.994252Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-25T08:28:37.990335Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." 
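The failure above (and the HookSidecars failures that follow) is not a test assertion but an apiserver-side HTTP 504: the create request itself never completed within the allowed duration. When that shows up, the aggregated API path is the first thing to check; a sketch, with resource names taken from the pod listing and the grep patterns assumed:

kubectl get apiservices | grep kubevirt          # aggregated API registered and Available?
kubectl get endpoints -n kube-system virt-api    # does the service have ready endpoints?
kubectl get pods -n kube-system | grep virt-api  # are both replicas still Running?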
level=info timestamp=2018-07-25T08:31:44.863791Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:31:44.864203Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:33:48.747940Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:33:48.748490Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:33:48.919510Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiflzln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiflzln" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:32:03.322094Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:32:03.326669Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:32:03.349217Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.295425Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-25T08:33:19.307022Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-25T08:33:19.404526Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmildfvc" level=info timestamp=2018-07-25T08:33:19.666866Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-25T08:33:19.667207Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.667453Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-07-25T08:33:19.667534Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-25T08:33:19.667487Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-25T08:33:19.668372Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.668593Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:33:19.668794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.679040Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmiflzln-ffhsm Pod phase: Pending • Failure [30.902 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should successfully start with hook sidecar annotation [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 Expected error: <*errors.StatusError | 0xc42036aa20>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:63 ------------------------------ STEP: Starting a VMI Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:32:27 http: TLS handshake error from 10.129.0.1:43308: EOF 2018/07/25 08:32:37 http: TLS handshake error from 10.129.0.1:43316: EOF level=info timestamp=2018-07-25T08:32:39.189539Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ 
proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:32:47 http: TLS handshake error from 10.129.0.1:43324: EOF 2018/07/25 08:32:57 http: TLS handshake error from 10.129.0.1:43332: EOF 2018/07/25 08:33:07 http: TLS handshake error from 10.129.0.1:43340: EOF level=info timestamp=2018-07-25T08:33:09.295441Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:33:17 http: TLS handshake error from 10.129.0.1:43348: EOF 2018/07/25 08:33:27 http: TLS handshake error from 10.129.0.1:43356: EOF 2018/07/25 08:33:37 http: TLS handshake error from 10.129.0.1:43364: EOF 2018/07/25 08:33:47 http: TLS handshake error from 10.129.0.1:43372: EOF 2018/07/25 08:33:57 http: TLS handshake error from 10.129.0.1:43380: EOF 2018/07/25 08:34:07 http: TLS handshake error from 10.129.0.1:43388: EOF level=info timestamp=2018-07-25T08:34:09.234057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:34:17 http: TLS handshake error from 10.129.0.1:43396: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running 2018/07/25 08:32:05 http: TLS handshake error from 10.129.0.1:42958: EOF 2018/07/25 08:32:15 http: TLS handshake error from 10.129.0.1:42966: EOF 2018/07/25 08:32:25 http: TLS handshake error from 10.129.0.1:42974: EOF 2018/07/25 08:32:35 http: TLS handshake error from 10.129.0.1:42982: EOF 2018/07/25 08:32:45 http: TLS handshake error from 10.129.0.1:42990: EOF 2018/07/25 08:32:55 http: TLS handshake error from 10.129.0.1:42998: EOF 2018/07/25 08:33:05 http: TLS handshake error from 10.129.0.1:43006: EOF 2018/07/25 08:33:15 http: TLS handshake error from 10.129.0.1:43014: EOF 2018/07/25 08:33:25 http: TLS handshake error from 10.129.0.1:43022: EOF 2018/07/25 08:33:35 http: TLS handshake error from 10.129.0.1:43030: EOF level=info timestamp=2018-07-25T08:33:39.466687Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:33:45 http: TLS handshake error from 10.129.0.1:43038: EOF 2018/07/25 08:33:55 http: TLS handshake error from 10.129.0.1:43046: EOF 2018/07/25 08:34:05 http: TLS handshake error from 10.129.0.1:43054: EOF 2018/07/25 08:34:15 http: TLS handshake error from 10.129.0.1:43062: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:28:37.972896Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-25T08:28:37.973257Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-25T08:28:37.984055Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-25T08:28:37.985259Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-25T08:28:37.994252Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
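The recurring "TLS handshake error ... EOF" entries above are consistent with probes that open a TCP connection to virt-api and close it before completing a handshake; they are background noise rather than the cause of the 504s. A direct request would confirm the server still answers TLS (service name from the deployment above; the pod name is hypothetical and this assumes the image ships curl):

kubectl -n kube-system get svc virt-api    # confirm the service and its port
kubectl -n kube-system exec virt-api-7d79764579-qxdsk -- curl -ks https://virt-api/ | head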
level=info timestamp=2018-07-25T08:28:37.990335Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-25T08:31:44.863791Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:31:44.864203Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:33:48.747940Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:33:48.748490Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:33:48.919510Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiflzln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiflzln" level=info timestamp=2018-07-25T08:34:19.682064Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqmmr9 kind= uid=7448e0b2-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:34:19.683084Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqmmr9 kind= uid=7448e0b2-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:34:19.965189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9" level=info timestamp=2018-07-25T08:34:20.029335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:32:03.322094Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:32:03.326669Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:32:03.349217Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." 
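The "Operation cannot be fulfilled ... the object has been modified" reenqueues above are ordinary optimistic-concurrency conflicts: a write carried a stale resourceVersion, so the controller re-reads and retries. The same read-modify-write retry pattern from the command line looks roughly like this sketch:

while true; do
    kubectl get vmi testvmiflzln -n kubevirt-test-default -o yaml > /tmp/vmi.yaml
    # ... apply the intended change to /tmp/vmi.yaml here ...
    kubectl replace -f /tmp/vmi.yaml && break   # fails if resourceVersion went stale; loop retries
    sleep 1
done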
level=info timestamp=2018-07-25T08:33:19.295425Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-25T08:33:19.307022Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-25T08:33:19.404526Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmildfvc" level=info timestamp=2018-07-25T08:33:19.666866Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-25T08:33:19.667207Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.667453Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-25T08:33:19.667534Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-25T08:33:19.667487Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-25T08:33:19.668372Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.668593Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:33:19.668794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.679040Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmiqmmr9-7x2xt Pod phase: Pending • Failure [30.859 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 Expected error: <*errors.StatusError | 0xc42083c3f0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:70 ------------------------------ STEP: Getting hook-sidecar logs Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:32:57 http: TLS handshake error from 10.129.0.1:43332: EOF 2018/07/25 08:33:07 http: TLS handshake error from 10.129.0.1:43340: EOF level=info timestamp=2018-07-25T08:33:09.295441Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:33:17 http: TLS handshake error from 10.129.0.1:43348: EOF 2018/07/25 08:33:27 http: TLS handshake error from 10.129.0.1:43356: EOF 2018/07/25 08:33:37 http: TLS handshake error from 10.129.0.1:43364: EOF 2018/07/25 08:33:47 http: TLS handshake error from 10.129.0.1:43372: EOF 2018/07/25 08:33:57 http: TLS handshake error from 10.129.0.1:43380: EOF 2018/07/25 08:34:07 http: TLS handshake error from 10.129.0.1:43388: EOF level=info timestamp=2018-07-25T08:34:09.234057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:34:17 http: TLS handshake error from 10.129.0.1:43396: EOF 2018/07/25 08:34:27 http: TLS handshake error from 10.129.0.1:43404: EOF 2018/07/25 08:34:37 http: TLS handshake error from 10.129.0.1:43412: EOF level=info timestamp=2018-07-25T08:34:39.173487Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:34:47 http: TLS handshake error from 10.129.0.1:43420: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running 2018/07/25 08:32:35 http: TLS handshake error from 10.129.0.1:42982: EOF 2018/07/25 08:32:45 http: TLS handshake error from 10.129.0.1:42990: EOF 2018/07/25 08:32:55 http: TLS handshake error from 10.129.0.1:42998: EOF 
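Note that virt-launcher-testvmiqmmr9-7x2xt never left Pending before the spec's 30-second budget ran out, which together with the 504s suggests the control plane rather than the test is struggling. For a Pending launcher pod, the scheduling and image-pull events say which stage stalled:

kubectl describe pod -n kubevirt-test-default virt-launcher-testvmiqmmr9-7x2xt \
    | sed -n '/^Events:/,$p'   # keep only the Events section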
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:32:57 http: TLS handshake error from 10.129.0.1:43332: EOF
2018/07/25 08:33:07 http: TLS handshake error from 10.129.0.1:43340: EOF
level=info timestamp=2018-07-25T08:33:09.295441Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:33:17 http: TLS handshake error from 10.129.0.1:43348: EOF
2018/07/25 08:33:27 http: TLS handshake error from 10.129.0.1:43356: EOF
2018/07/25 08:33:37 http: TLS handshake error from 10.129.0.1:43364: EOF
2018/07/25 08:33:47 http: TLS handshake error from 10.129.0.1:43372: EOF
2018/07/25 08:33:57 http: TLS handshake error from 10.129.0.1:43380: EOF
2018/07/25 08:34:07 http: TLS handshake error from 10.129.0.1:43388: EOF
level=info timestamp=2018-07-25T08:34:09.234057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:34:17 http: TLS handshake error from 10.129.0.1:43396: EOF
2018/07/25 08:34:27 http: TLS handshake error from 10.129.0.1:43404: EOF
2018/07/25 08:34:37 http: TLS handshake error from 10.129.0.1:43412: EOF
level=info timestamp=2018-07-25T08:34:39.173487Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:34:47 http: TLS handshake error from 10.129.0.1:43420: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:32:35 http: TLS handshake error from 10.129.0.1:42982: EOF
2018/07/25 08:32:45 http: TLS handshake error from 10.129.0.1:42990: EOF
2018/07/25 08:32:55 http: TLS handshake error from 10.129.0.1:42998: EOF
2018/07/25 08:33:05 http: TLS handshake error from 10.129.0.1:43006: EOF
2018/07/25 08:33:15 http: TLS handshake error from 10.129.0.1:43014: EOF
2018/07/25 08:33:25 http: TLS handshake error from 10.129.0.1:43022: EOF
2018/07/25 08:33:35 http: TLS handshake error from 10.129.0.1:43030: EOF
level=info timestamp=2018-07-25T08:33:39.466687Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:33:45 http: TLS handshake error from 10.129.0.1:43038: EOF
2018/07/25 08:33:55 http: TLS handshake error from 10.129.0.1:43046: EOF
2018/07/25 08:34:05 http: TLS handshake error from 10.129.0.1:43054: EOF
2018/07/25 08:34:15 http: TLS handshake error from 10.129.0.1:43062: EOF
2018/07/25 08:34:25 http: TLS handshake error from 10.129.0.1:43070: EOF
2018/07/25 08:34:35 http: TLS handshake error from 10.129.0.1:43078: EOF
2018/07/25 08:34:45 http: TLS handshake error from 10.129.0.1:43086: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:28:37.990335Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-25T08:31:44.863791Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:31:44.864203Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:33:48.747940Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:33:48.748490Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:33:48.919510Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiflzln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiflzln"
level=info timestamp=2018-07-25T08:34:19.682064Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqmmr9 kind= uid=7448e0b2-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:34:19.683084Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqmmr9 kind= uid=7448e0b2-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:34:19.965189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:20.029335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:20.349631Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqmmr9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 7448e0b2-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:50.528503Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:34:50.530709Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:34:50.819867Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:50.841681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:33:19.404526Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmildfvc" level=info timestamp=2018-07-25T08:33:19.666866Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-25T08:33:19.667207Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.667453Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-25T08:33:19.667534Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-25T08:33:19.667487Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-25T08:33:19.668372Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.668593Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:33:19.668794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:33:19.679040Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
Pod name: virt-launcher-testvmiwklwv-vlqtk
Pod phase: Pending

• Failure [30.902 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should update domain XML with SM BIOS properties [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83

      Expected error:
          <*errors.errorString | 0xc42069e510>: {
              s: "resource name may not be empty",
          }
          resource name may not be empty
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1029
------------------------------
STEP: Reading domain XML using virsh
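"resource name may not be empty" here most plausibly means the test built a request against the virt-launcher pod while the pod name was still empty: the launcher pod above is stuck in Pending, so the lookup had nothing to return. A hedged sketch of a guard that surfaces this condition explicitly; the label selector and helper are illustrative assumptions, written against the context-free client-go signatures of this era:

package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// launcherPodName resolves the virt-launcher pod backing a VMI and
// fails loudly instead of letting an empty name reach the API, which
// yields the opaque "resource name may not be empty" error above.
func launcherPodName(c kubernetes.Interface, ns, vmiName string) (string, error) {
	pods, err := c.CoreV1().Pods(ns).List(metav1.ListOptions{
		// Assumed selector: KubeVirt labels launcher pods with the VMI name.
		LabelSelector: fmt.Sprintf("kubevirt.io/domain=%s", vmiName),
	})
	if err != nil {
		return "", err
	}
	if len(pods.Items) == 0 {
		return "", fmt.Errorf("no virt-launcher pod found for VMI %s/%s", ns, vmiName)
	}
	return pods.Items[0].Name, nil
}

func main() {} // sketch only; wire a real clientset to use launcherPodName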
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:33:27 http: TLS handshake error from 10.129.0.1:43356: EOF
2018/07/25 08:33:37 http: TLS handshake error from 10.129.0.1:43364: EOF
2018/07/25 08:33:47 http: TLS handshake error from 10.129.0.1:43372: EOF
2018/07/25 08:33:57 http: TLS handshake error from 10.129.0.1:43380: EOF
2018/07/25 08:34:07 http: TLS handshake error from 10.129.0.1:43388: EOF
level=info timestamp=2018-07-25T08:34:09.234057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:34:17 http: TLS handshake error from 10.129.0.1:43396: EOF
2018/07/25 08:34:27 http: TLS handshake error from 10.129.0.1:43404: EOF
2018/07/25 08:34:37 http: TLS handshake error from 10.129.0.1:43412: EOF
level=info timestamp=2018-07-25T08:34:39.173487Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:34:47 http: TLS handshake error from 10.129.0.1:43420: EOF
2018/07/25 08:34:57 http: TLS handshake error from 10.129.0.1:43430: EOF
2018/07/25 08:35:07 http: TLS handshake error from 10.129.0.1:43438: EOF
level=info timestamp=2018-07-25T08:35:09.478888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:17 http: TLS handshake error from 10.129.0.1:43446: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:33:25 http: TLS handshake error from 10.129.0.1:43022: EOF
2018/07/25 08:33:35 http: TLS handshake error from 10.129.0.1:43030: EOF
level=info timestamp=2018-07-25T08:33:39.466687Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:33:45 http: TLS handshake error from 10.129.0.1:43038: EOF
2018/07/25 08:33:55 http: TLS handshake error from 10.129.0.1:43046: EOF
2018/07/25 08:34:05 http: TLS handshake error from 10.129.0.1:43054: EOF
2018/07/25 08:34:15 http: TLS handshake error from 10.129.0.1:43062: EOF
2018/07/25 08:34:25 http: TLS handshake error from 10.129.0.1:43070: EOF
2018/07/25 08:34:35 http: TLS handshake error from 10.129.0.1:43078: EOF
2018/07/25 08:34:45 http: TLS handshake error from 10.129.0.1:43086: EOF
2018/07/25 08:34:55 http: TLS handshake error from 10.129.0.1:43094: EOF
2018/07/25 08:35:05 http: TLS handshake error from 10.129.0.1:43104: EOF
level=info timestamp=2018-07-25T08:35:08.830168Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:35:08.880490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:15 http: TLS handshake error from 10.129.0.1:43112: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:33:48.747940Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:33:48.748490Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiflzln kind= uid=61e24b0d-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:33:48.919510Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiflzln\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiflzln"
level=info timestamp=2018-07-25T08:34:19.682064Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqmmr9 kind= uid=7448e0b2-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:34:19.683084Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqmmr9 kind= uid=7448e0b2-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:34:19.965189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:20.029335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:20.349631Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqmmr9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 7448e0b2-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:50.528503Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:34:50.530709Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:34:50.819867Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:50.841681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:51.281791Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwklwv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 86ac8b3a-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:35:21.464821Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:35:21.465458Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
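The StorageError entries above ("Precondition failed: UID in precondition: ..., UID in object meta: ") read as a UID-guarded write racing a deletion: the request named the old instance's UID, but the object in etcd no longer carries it (the "UID in object meta" is empty). A hedged sketch of how such a UID precondition is attached with the client-go API of this era; the surrounding delete call is illustrative:

package main

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
)

// deleteOptionsFor builds options that make the apiserver act only
// while the object is still the exact instance the caller observed;
// once the UID differs (object deleted or recreated), the request
// fails the precondition, much like the log entries above.
func deleteOptionsFor(uid types.UID) *metav1.DeleteOptions {
	return &metav1.DeleteOptions{
		Preconditions: &metav1.Preconditions{UID: &uid},
	}
}

func main() {
	opts := deleteOptionsFor(types.UID("7448e0b2-8fe5-11e8-bb2b-525500d15501"))
	_ = opts // pass to e.g. pods.Delete(name, opts) with an era-appropriate clientset
}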
Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:32:03.322094Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:32:03.326669Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:32:03.349217Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.295425Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:33:19.307022Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:33:19.404526Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmildfvc"
level=info timestamp=2018-07-25T08:33:19.666866Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-25T08:33:19.667207Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.667453Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-25T08:33:19.667534Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing shutdown."
level=info timestamp=2018-07-25T08:33:19.667487Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-25T08:33:19.668372Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.668593Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:33:19.668794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.679040Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
Pod name: virt-handler-swxsf
Pod phase: Running
level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
Pod name: virt-launcher-testvmifrb64-qcv4d
Pod phase: Pending

• Failure [30.898 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting and stopping the same VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:90
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:91
      should success multiple times [It]
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:92

      Expected
          <*errors.StatusError | 0xc42083d950>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:98
------------------------------
STEP: Starting the VirtualMachineInstance
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:34:07 http: TLS handshake error from 10.129.0.1:43388: EOF
level=info timestamp=2018-07-25T08:34:09.234057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:34:17 http: TLS handshake error from 10.129.0.1:43396: EOF
2018/07/25 08:34:27 http: TLS handshake error from 10.129.0.1:43404: EOF
2018/07/25 08:34:37 http: TLS handshake error from 10.129.0.1:43412: EOF
level=info timestamp=2018-07-25T08:34:39.173487Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:34:47 http: TLS handshake error from 10.129.0.1:43420: EOF
2018/07/25 08:34:57 http: TLS handshake error from 10.129.0.1:43430: EOF
2018/07/25 08:35:07 http: TLS handshake error from 10.129.0.1:43438: EOF
level=info timestamp=2018-07-25T08:35:09.478888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:17 http: TLS handshake error from 10.129.0.1:43446: EOF
2018/07/25 08:35:27 http: TLS handshake error from 10.129.0.1:43454: EOF
2018/07/25 08:35:37 http: TLS handshake error from 10.129.0.1:43462: EOF
level=info timestamp=2018-07-25T08:35:39.334195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:47 http: TLS handshake error from 10.129.0.1:43470: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:33:45 http: TLS handshake error from 10.129.0.1:43038: EOF
2018/07/25 08:33:55 http: TLS handshake error from 10.129.0.1:43046: EOF
2018/07/25 08:34:05 http: TLS handshake error from 10.129.0.1:43054: EOF
2018/07/25 08:34:15 http: TLS handshake error from 10.129.0.1:43062: EOF
2018/07/25 08:34:25 http: TLS handshake error from 10.129.0.1:43070: EOF
2018/07/25 08:34:35 http: TLS handshake error from 10.129.0.1:43078: EOF
2018/07/25 08:34:45 http: TLS handshake error from 10.129.0.1:43086: EOF
2018/07/25 08:34:55 http: TLS handshake error from 10.129.0.1:43094: EOF
2018/07/25 08:35:05 http: TLS handshake error from 10.129.0.1:43104: EOF
level=info timestamp=2018-07-25T08:35:08.830168Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:35:08.880490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:15 http: TLS handshake error from 10.129.0.1:43112: EOF
2018/07/25 08:35:25 http: TLS handshake error from 10.129.0.1:43120: EOF
2018/07/25 08:35:35 http: TLS handshake error from 10.129.0.1:43128: EOF
2018/07/25 08:35:45 http: TLS handshake error from 10.129.0.1:43136: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:34:19.965189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:20.029335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:20.349631Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqmmr9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 7448e0b2-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:50.528503Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:34:50.530709Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:34:50.819867Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:50.841681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:51.281791Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwklwv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 86ac8b3a-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:35:21.464821Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:35:21.465458Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:35:22.117546Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifrb64\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmifrb64, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 991db4fb-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifrb64"
level=info timestamp=2018-07-25T08:35:52.311601Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:35:52.313123Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:35:52.565544Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9"
level=info timestamp=2018-07-25T08:35:52.649050Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9"
Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:32:03.322094Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:32:03.326669Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:32:03.349217Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.295425Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:33:19.307022Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:33:19.404526Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmildfvc"
level=info timestamp=2018-07-25T08:33:19.666866Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-25T08:33:19.667207Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.667453Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-25T08:33:19.667534Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing shutdown."
level=info timestamp=2018-07-25T08:33:19.667487Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-25T08:33:19.668372Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.668593Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:33:19.668794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.679040Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
Pod name: virt-handler-swxsf
Pod phase: Running
level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
Pod name: virt-launcher-testvmiwxhw9-9dwk8
Pod phase: Pending

• Failure [30.760 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:111
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:112
      should not modify the spec on status update [It]
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:113

      Expected
          <*errors.StatusError | 0xc42083c5a0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:119
------------------------------
STEP: Starting the VirtualMachineInstance
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:34:27 http: TLS handshake error from 10.129.0.1:43404: EOF
2018/07/25 08:34:37 http: TLS handshake error from 10.129.0.1:43412: EOF
level=info timestamp=2018-07-25T08:34:39.173487Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:34:47 http: TLS handshake error from 10.129.0.1:43420: EOF
2018/07/25 08:34:57 http: TLS handshake error from 10.129.0.1:43430: EOF
2018/07/25 08:35:07 http: TLS handshake error from 10.129.0.1:43438: EOF
level=info timestamp=2018-07-25T08:35:09.478888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:17 http: TLS handshake error from 10.129.0.1:43446: EOF
2018/07/25 08:35:27 http: TLS handshake error from 10.129.0.1:43454: EOF
2018/07/25 08:35:37 http: TLS handshake error from 10.129.0.1:43462: EOF
level=info timestamp=2018-07-25T08:35:39.334195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:47 http: TLS handshake error from 10.129.0.1:43470: EOF
2018/07/25 08:35:57 http: TLS handshake error from 10.129.0.1:43478: EOF
2018/07/25 08:36:07 http: TLS handshake error from 10.129.0.1:43486: EOF
2018/07/25 08:36:17 http: TLS handshake error from 10.129.0.1:43494: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:34:25 http: TLS handshake error from 10.129.0.1:43070: EOF
2018/07/25 08:34:35 http: TLS handshake error from 10.129.0.1:43078: EOF
2018/07/25 08:34:45 http: TLS handshake error from 10.129.0.1:43086: EOF
2018/07/25 08:34:55 http: TLS handshake error from 10.129.0.1:43094: EOF
2018/07/25 08:35:05 http: TLS handshake error from 10.129.0.1:43104: EOF
level=info timestamp=2018-07-25T08:35:08.830168Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:35:08.880490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:15 http: TLS handshake error from 10.129.0.1:43112: EOF
2018/07/25 08:35:25 http: TLS handshake error from 10.129.0.1:43120: EOF
2018/07/25 08:35:35 http: TLS handshake error from 10.129.0.1:43128: EOF
2018/07/25 08:35:45 http: TLS handshake error from 10.129.0.1:43136: EOF
2018/07/25 08:35:55 http: TLS handshake error from 10.129.0.1:43144: EOF
2018/07/25 08:36:05 http: TLS handshake error from 10.129.0.1:43152: EOF
level=info timestamp=2018-07-25T08:36:09.263883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:36:15 http: TLS handshake error from 10.129.0.1:43160: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:34:20.349631Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqmmr9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqmmr9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 7448e0b2-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqmmr9"
level=info timestamp=2018-07-25T08:34:50.528503Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:34:50.530709Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:34:50.819867Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:50.841681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:51.281791Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwklwv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 86ac8b3a-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:35:21.464821Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:35:21.465458Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:35:22.117546Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifrb64\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmifrb64, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 991db4fb-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifrb64"
level=info timestamp=2018-07-25T08:35:52.311601Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:35:52.313123Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:35:52.565544Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9"
level=info timestamp=2018-07-25T08:35:52.649050Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9"
level=info timestamp=2018-07-25T08:36:23.080704Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:36:23.082896Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:32:03.322094Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:32:03.326669Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:32:03.349217Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.295425Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:33:19.307022Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:33:19.404526Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmildfvc"
level=info timestamp=2018-07-25T08:33:19.666866Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-25T08:33:19.667207Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind= uid=1806d3dd-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.667453Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-25T08:33:19.667534Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing shutdown."
level=info timestamp=2018-07-25T08:33:19.667487Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-25T08:33:19.668372Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.668593Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:33:19.668794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.679040Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
Pod name: virt-handler-swxsf
Pod phase: Running
level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
Pod name: virt-launcher-testvmiqpg74-7nqq9
Pod phase: Pending

• Failure [30.999 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting multiple VMIs
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:129
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:130
      should success [It]
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:131

      Expected
          <*errors.StatusError | 0xc4201537a0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:55
------------------------------
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:34:47 http: TLS handshake error from 10.129.0.1:43420: EOF
2018/07/25 08:34:57 http: TLS handshake error from 10.129.0.1:43430: EOF
2018/07/25 08:35:07 http: TLS handshake error from 10.129.0.1:43438: EOF
level=info timestamp=2018-07-25T08:35:09.478888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:17 http: TLS handshake error from 10.129.0.1:43446: EOF
2018/07/25 08:35:27 http: TLS handshake error from 10.129.0.1:43454: EOF
2018/07/25 08:35:37 http: TLS handshake error from 10.129.0.1:43462: EOF
level=info timestamp=2018-07-25T08:35:39.334195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:47 http: TLS handshake error from 10.129.0.1:43470: EOF
2018/07/25 08:35:57 http: TLS handshake error from 10.129.0.1:43478: EOF
2018/07/25 08:36:07 http: TLS handshake error from 10.129.0.1:43486: EOF
2018/07/25 08:36:17 http: TLS handshake error from 10.129.0.1:43494: EOF
2018/07/25 08:36:27 http: TLS handshake error from 10.129.0.1:43502: EOF
2018/07/25 08:36:37 http: TLS handshake error from 10.129.0.1:43510: EOF
2018/07/25 08:36:47 http: TLS handshake error from 10.129.0.1:43518: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:35:05 http: TLS handshake error from 10.129.0.1:43104: EOF
level=info timestamp=2018-07-25T08:35:08.830168Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:35:08.880490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:15 http: TLS handshake error from 10.129.0.1:43112: EOF
2018/07/25 08:35:25 http: TLS handshake error from 10.129.0.1:43120: EOF
2018/07/25 08:35:35 http: TLS handshake error from 10.129.0.1:43128: EOF
2018/07/25 08:35:45 http: TLS handshake error from 10.129.0.1:43136: EOF
2018/07/25 08:35:55 http: TLS handshake error from 10.129.0.1:43144: EOF
2018/07/25 08:36:05 http: TLS handshake error from 10.129.0.1:43152: EOF
level=info timestamp=2018-07-25T08:36:09.263883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:36:15 http: TLS handshake error from 10.129.0.1:43160: EOF
2018/07/25 08:36:25 http: TLS handshake error from 10.129.0.1:43168: EOF
2018/07/25 08:36:35 http: TLS handshake error from 10.129.0.1:43176: EOF
level=info timestamp=2018-07-25T08:36:39.439416Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:36:45 http: TLS handshake error from 10.129.0.1:43184: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:34:50.530709Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwklwv kind= uid=86ac8b3a-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:34:50.819867Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:50.841681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:34:51.281791Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwklwv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwklwv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 86ac8b3a-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwklwv"
level=info timestamp=2018-07-25T08:35:21.464821Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:35:21.465458Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:35:22.117546Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifrb64\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmifrb64, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 991db4fb-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifrb64"
level=info timestamp=2018-07-25T08:35:52.311601Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:35:52.313123Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:35:52.565544Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9"
level=info timestamp=2018-07-25T08:35:52.649050Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9"
level=info timestamp=2018-07-25T08:36:23.080704Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:36:23.082896Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:36:53.867085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:36:53.873685Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:33:19.668372Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.668593Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:33:19.668794Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildfvc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:33:19.679040Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-25T08:36:40.277071Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:36:41.125932Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-25T08:36:41.132850Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=Domain uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-25T08:36:41.758854Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:36:41.759645Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=Domain uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-25T08:36:41.806235Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:36:41.808787Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:36:41.809173Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:36:41.908265Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:36:41.927787Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:36:41.958728Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmi6h55w-nx9nz Pod phase: Pending Pod name: virt-launcher-testvmiqpg74-7nqq9 Pod phase: Running level=info timestamp=2018-07-25T08:36:40.685439Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.116683Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:36:41.121091Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 630a6a10-c11f-4f2b-a798-c5bb91b507f6" level=info timestamp=2018-07-25T08:36:41.121397Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:36:41.137132Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.735662Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.748314Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:36:41.759442Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.789752Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.799852Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
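The "domain status: S:R" pairs that client.go logs above are raw libvirt state/reason integers, which virt-handler then renders as phrases like "Paused reason StartingUp" and "Running reason Unknown". Assuming libvirt's documented virDomainState and virDomainPausedReason enums (this decoding is an illustration, not KubeVirt's actual translation code), the two pairs seen in this run decode as follows:

package main

import "fmt"

// Decode the "domain status: state:reason" pairs from the virt-launcher log.
// State values follow libvirt's virDomainState enum; paused reasons follow
// virDomainPausedReason. Only the values that appear in this log are mapped.
var states = map[int]string{
	1: "Running", // VIR_DOMAIN_RUNNING
	3: "Paused",  // VIR_DOMAIN_PAUSED
}

var pausedReasons = map[int]string{
	11: "StartingUp", // VIR_DOMAIN_PAUSED_STARTING_UP
}

func decode(state, reason int) string {
	name, ok := states[state]
	if !ok {
		return fmt.Sprintf("unmapped state %d:%d", state, reason)
	}
	if name == "Paused" {
		if r, ok := pausedReasons[reason]; ok {
			return fmt.Sprintf("%s reason %s", name, r)
		}
	}
	return name
}

func main() {
	fmt.Println(decode(3, 11)) // "Paused reason StartingUp", as logged while the domain boots
	fmt.Println(decode(1, 1))  // "Running"; virt-handler reports it as "Running reason Unknown"
}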
level=info timestamp=2018-07-25T08:36:41.803397Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:41.804935Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:36:41.810095Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:36:41.958112Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:42.125441Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 630a6a10-c11f-4f2b-a798-c5bb91b507f6: 179"

• Failure in Spec Setup (BeforeEach) [30.615 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose ClusterIP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:68
      Should expose a Cluster IP service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:71

      Expected error:
          <*errors.StatusError | 0xc420153e60>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
level=info timestamp=2018-07-25T08:35:09.478888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:17 http: TLS handshake error from 10.129.0.1:43446: EOF
2018/07/25 08:35:27 http: TLS handshake error from 10.129.0.1:43454: EOF
2018/07/25 08:35:37 http: TLS handshake error from 10.129.0.1:43462: EOF
level=info timestamp=2018-07-25T08:35:39.334195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:47 http: TLS handshake error from 10.129.0.1:43470: EOF
2018/07/25 08:35:57 http: TLS handshake error from 10.129.0.1:43478: EOF
2018/07/25 08:36:07 http: TLS handshake error from 10.129.0.1:43486: EOF
2018/07/25 08:36:17 http: TLS handshake error from 10.129.0.1:43494: EOF
2018/07/25 08:36:27 http: TLS handshake error from 10.129.0.1:43502: EOF
2018/07/25 08:36:37 http: TLS handshake error from 10.129.0.1:43510: EOF
2018/07/25 08:36:47 http: TLS handshake error from 10.129.0.1:43518: EOF
2018/07/25 08:36:57 http: TLS handshake error from 10.129.0.1:43526: EOF
2018/07/25 08:37:07 http: TLS handshake error from 10.129.0.1:43534: EOF
2018/07/25 08:37:17 http: TLS handshake error from 10.129.0.1:43542: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:35:25 http: TLS handshake error from 10.129.0.1:43120: EOF
2018/07/25 08:35:35 http: TLS handshake error from 10.129.0.1:43128: EOF
2018/07/25 08:35:45 http: TLS
handshake error from 10.129.0.1:43136: EOF 2018/07/25 08:35:55 http: TLS handshake error from 10.129.0.1:43144: EOF 2018/07/25 08:36:05 http: TLS handshake error from 10.129.0.1:43152: EOF level=info timestamp=2018-07-25T08:36:09.263883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:36:15 http: TLS handshake error from 10.129.0.1:43160: EOF 2018/07/25 08:36:25 http: TLS handshake error from 10.129.0.1:43168: EOF 2018/07/25 08:36:35 http: TLS handshake error from 10.129.0.1:43176: EOF level=info timestamp=2018-07-25T08:36:39.439416Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:36:45 http: TLS handshake error from 10.129.0.1:43184: EOF 2018/07/25 08:36:55 http: TLS handshake error from 10.129.0.1:43192: EOF 2018/07/25 08:37:05 http: TLS handshake error from 10.129.0.1:43200: EOF level=info timestamp=2018-07-25T08:37:09.474094Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:15 http: TLS handshake error from 10.129.0.1:43208: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:35:21.464821Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:35:21.465458Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb64 kind= uid=991db4fb-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:35:22.117546Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifrb64\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmifrb64, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 991db4fb-8fe5-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifrb64" level=info timestamp=2018-07-25T08:35:52.311601Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:35:52.313123Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:35:52.565544Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9" level=info timestamp=2018-07-25T08:35:52.649050Z pos=vmi.go:157 component=virt-controller service=http 
reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9" level=info timestamp=2018-07-25T08:36:23.080704Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:36:23.082896Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:36:53.867085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:36:53.873685Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:24.445263Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:37:24.445574Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:24.678728Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" level=info timestamp=2018-07-25T08:37:24.743061Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:37:10.700328Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-25T08:37:10.700610Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=Domain uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-25T08:37:10.932769Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:37:10.942089Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=Domain uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-25T08:37:10.966509Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-25T08:37:10.967072Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:37:10.971989Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:37:11.017730Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:37:11.018217Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="No update processing required" level=error timestamp=2018-07-25T08:37:11.046685Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6h55w\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-25T08:37:11.047048Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6h55w\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6h55w" level=info timestamp=2018-07-25T08:37:11.047840Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:37:11.062260Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:37:11.062885Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:37:11.068291Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmi6h55w-nx9nz Pod phase: Running level=info timestamp=2018-07-25T08:37:10.687791Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:37:10.693863Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41" level=info timestamp=2018-07-25T08:37:10.694308Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:37:10.701027Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:10.909582Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:37:10.929211Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:10.933949Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:10.938588Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:37:10.958292Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:37:10.962395Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:10.963535Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:10.972866Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:11.053352Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:11.067341Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:11.698512Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41: 185" Pod name: virt-launcher-testvmib4j5w-rklrj Pod phase: Pending Pod name: virt-launcher-testvmiqpg74-7nqq9 Pod phase: Running level=info timestamp=2018-07-25T08:36:40.685439Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.116683Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:36:41.121091Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 630a6a10-c11f-4f2b-a798-c5bb91b507f6" level=info timestamp=2018-07-25T08:36:41.121397Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:36:41.137132Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.735662Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.748314Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:36:41.759442Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.789752Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.799852Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:36:41.803397Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:41.804935Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:36:41.810095Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:36:41.958112Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:42.125441Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 630a6a10-c11f-4f2b-a798-c5bb91b507f6: 179"

• Failure in Spec Setup (BeforeEach) [30.773 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose ClusterIP service with string target-port [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:98
      Should expose a ClusterIP service and connect to the vm on port 80
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:101

      Expected error:
          <*errors.StatusError | 0xc4210e86c0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:35:37 http: TLS handshake error from 10.129.0.1:43462: EOF
level=info timestamp=2018-07-25T08:35:39.334195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:35:47 http: TLS handshake error from 10.129.0.1:43470: EOF
2018/07/25 08:35:57 http: TLS handshake error from 10.129.0.1:43478: EOF
2018/07/25 08:36:07 http: TLS handshake error from 10.129.0.1:43486: EOF
2018/07/25 08:36:17 http: TLS handshake error from 10.129.0.1:43494: EOF
2018/07/25 08:36:27 http: TLS handshake error from 10.129.0.1:43502: EOF
2018/07/25 08:36:37 http: TLS handshake error from 10.129.0.1:43510: EOF
2018/07/25 08:36:47 http: TLS handshake error from 10.129.0.1:43518: EOF
2018/07/25 08:36:57 http: TLS handshake error from 10.129.0.1:43526: EOF
2018/07/25 08:37:07 http: TLS handshake error from 10.129.0.1:43534: EOF
2018/07/25 08:37:17 http: TLS handshake error from 10.129.0.1:43542: EOF
2018/07/25 08:37:27 http: TLS handshake error from 10.129.0.1:43550: EOF
2018/07/25 08:37:37 http: TLS handshake error from 10.129.0.1:43558: EOF
2018/07/25 08:37:47 http: TLS handshake error from 10.129.0.1:43566: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
level=info timestamp=2018-07-25T08:36:09.263883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:36:15 http: TLS handshake error from 10.129.0.1:43160: EOF
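Both Expose failures so far (and the NodePort one below) have the same shape: the spec never gets past its BeforeEach because an API request returns a *errors.StatusError with Reason "Timeout" and Code 504, meaning the apiserver itself gave up on the request, while the launcher logs show the VMIs booting normally. Classifying that error is mechanical with the apimachinery helpers; the sketch below rebuilds the error value from the failure output and shows how it is recognized:

package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// Reconstruct the StatusError printed by the failing spec and classify it.
func main() {
	err := &apierrors.StatusError{ErrStatus: metav1.Status{
		Status:  metav1.StatusFailure,
		Message: "Timeout: request did not complete within allowed duration",
		Reason:  metav1.StatusReasonTimeout,
		Code:    504,
	}}
	fmt.Println(apierrors.IsTimeout(err))      // true: server-side timeout, not a client error
	fmt.Println(apierrors.ReasonForError(err)) // Timeout
}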
2018/07/25 08:36:25 http: TLS handshake error from 10.129.0.1:43168: EOF 2018/07/25 08:36:35 http: TLS handshake error from 10.129.0.1:43176: EOF level=info timestamp=2018-07-25T08:36:39.439416Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:36:45 http: TLS handshake error from 10.129.0.1:43184: EOF 2018/07/25 08:36:55 http: TLS handshake error from 10.129.0.1:43192: EOF 2018/07/25 08:37:05 http: TLS handshake error from 10.129.0.1:43200: EOF level=info timestamp=2018-07-25T08:37:09.474094Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:15 http: TLS handshake error from 10.129.0.1:43208: EOF 2018/07/25 08:37:25 http: TLS handshake error from 10.129.0.1:43216: EOF 2018/07/25 08:37:35 http: TLS handshake error from 10.129.0.1:43224: EOF level=info timestamp=2018-07-25T08:37:39.451977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:45 http: TLS handshake error from 10.129.0.1:43232: EOF 2018/07/25 08:37:55 http: TLS handshake error from 10.129.0.1:43240: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:35:52.313123Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxhw9 kind= uid=ab81f952-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:35:52.565544Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9" level=info timestamp=2018-07-25T08:35:52.649050Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxhw9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxhw9" level=info timestamp=2018-07-25T08:36:23.080704Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:36:23.082896Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:36:53.867085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:36:53.873685Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" 
level=info timestamp=2018-07-25T08:37:24.445263Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:37:24.445574Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:24.678728Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" level=info timestamp=2018-07-25T08:37:24.743061Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" level=info timestamp=2018-07-25T08:37:55.246638Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:37:55.254456Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:55.490739Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79" level=info timestamp=2018-07-25T08:37:55.537141Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:37:11.047840Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:37:11.062260Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:37:11.062885Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:37:11.068291Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-25T08:37:41.340223Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:37:42.694145Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-25T08:37:42.694604Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=Domain uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-25T08:37:43.286171Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:37:43.286442Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=Domain uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-25T08:37:43.349078Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:37:43.357495Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:37:43.358158Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:37:43.467104Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:37:43.469107Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:37:43.482655Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmi6h55w-nx9nz Pod phase: Running level=info timestamp=2018-07-25T08:37:10.687791Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:37:10.693863Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41" level=info timestamp=2018-07-25T08:37:10.694308Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:37:10.701027Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:10.909582Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:37:10.929211Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:10.933949Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:10.938588Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:37:10.958292Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:37:10.962395Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:10.963535Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:10.972866Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:11.053352Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:11.067341Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:11.698512Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41: 185" Pod name: virt-launcher-testvmi6km79-8xn4r Pod phase: Pending Pod name: virt-launcher-testvmib4j5w-rklrj Pod phase: Running level=info timestamp=2018-07-25T08:37:41.731673Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-25T08:37:42.674191Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:37:42.688362Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 5fc814fb-5aab-49ee-a1a6-3669a47728f9" level=info timestamp=2018-07-25T08:37:42.691079Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:37:42.696457Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:43.228370Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:37:43.282414Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:43.286986Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:43.301378Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:37:43.325141Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:37:43.332546Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:43.338890Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:43.352188Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:43.475226Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:43.695103Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 5fc814fb-5aab-49ee-a1a6-3669a47728f9: 192" Pod name: virt-launcher-testvmiqpg74-7nqq9 Pod phase: Running level=info timestamp=2018-07-25T08:36:40.685439Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.116683Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:36:41.121091Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 630a6a10-c11f-4f2b-a798-c5bb91b507f6" level=info timestamp=2018-07-25T08:36:41.121397Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:36:41.137132Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.735662Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.748314Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:36:41.759442Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.789752Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.799852Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:36:41.803397Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:41.804935Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:36:41.810095Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:36:41.958112Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:42.125441Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 630a6a10-c11f-4f2b-a798-c5bb91b507f6: 179"

• Failure in Spec Setup (BeforeEach) [30.847 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose NodePort service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:124
      Should expose a NodePort service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:129

      Expected error:
          <*errors.StatusError | 0xc4210e9170>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:35:57 http: TLS handshake error from 10.129.0.1:43478: EOF
2018/07/25 08:36:07 http: TLS handshake error from 10.129.0.1:43486: EOF
2018/07/25 08:36:17 http: TLS handshake error from 10.129.0.1:43494: EOF
2018/07/25 08:36:27 http: TLS handshake error from 10.129.0.1:43502: EOF
2018/07/25 08:36:37 http: TLS handshake error from 10.129.0.1:43510: EOF
2018/07/25 08:36:47 http: TLS handshake error from 10.129.0.1:43518: EOF
2018/07/25 08:36:57 http: TLS handshake error from 10.129.0.1:43526: EOF
2018/07/25 08:37:07 http: TLS handshake error from 10.129.0.1:43534: EOF
2018/07/25 08:37:17 http: TLS handshake error from 10.129.0.1:43542: EOF
2018/07/25 08:37:27 http: TLS handshake error from 10.129.0.1:43550: EOF
2018/07/25 08:37:37 http: TLS handshake error from 10.129.0.1:43558: EOF
2018/07/25 08:37:47 http: TLS handshake error from 10.129.0.1:43566: EOF
2018/07/25 08:37:57 http: TLS handshake error from 10.129.0.1:43574: EOF
2018/07/25 08:38:07 http: TLS handshake error from 10.129.0.1:43584: EOF
2018/07/25 08:38:17 http: TLS handshake error from 10.129.0.1:43592: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
level=info timestamp=2018-07-25T08:36:39.439416Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:36:45 http: TLS handshake error from 10.129.0.1:43184: EOF
2018/07/25 08:36:55 http: TLS handshake error from 10.129.0.1:43192: EOF
2018/07/25 08:37:05 http: TLS handshake error from 10.129.0.1:43200: EOF
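The "http: TLS handshake error from 10.129.0.1:<port>: EOF" lines that pace both virt-api pods every ten seconds, from one source address with steadily climbing ports, look like a TCP-level health probe that opens the HTTPS port and hangs up without sending a ClientHello; Go's net/http server emits exactly this message when the handshake reads EOF, so these are likely probe noise rather than the cause of the 504s. A standard-library reproduction sketch:

package main

import (
	"log"
	"net"
	"net/http"
	"net/http/httptest"
	"os"
	"time"
)

// Reproduce the "http: TLS handshake error from ...: EOF" log line: a plain
// TCP client connects to an HTTPS port and hangs up before any TLS bytes are
// sent, which is what a TCP-level health check does.
func main() {
	srv := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {}))
	srv.Config.ErrorLog = log.New(os.Stderr, "", log.LstdFlags)
	srv.StartTLS()
	defer srv.Close()

	conn, err := net.Dial("tcp", srv.Listener.Addr().String())
	if err != nil {
		log.Fatal(err)
	}
	conn.Close() // hang up before the handshake; the server logs the EOF

	time.Sleep(100 * time.Millisecond) // give the server goroutine time to log
}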
level=info timestamp=2018-07-25T08:37:09.474094Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:15 http: TLS handshake error from 10.129.0.1:43208: EOF 2018/07/25 08:37:25 http: TLS handshake error from 10.129.0.1:43216: EOF 2018/07/25 08:37:35 http: TLS handshake error from 10.129.0.1:43224: EOF level=info timestamp=2018-07-25T08:37:39.451977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:45 http: TLS handshake error from 10.129.0.1:43232: EOF 2018/07/25 08:37:55 http: TLS handshake error from 10.129.0.1:43240: EOF 2018/07/25 08:38:05 http: TLS handshake error from 10.129.0.1:43250: EOF level=info timestamp=2018-07-25T08:38:09.370461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:38:15 http: TLS handshake error from 10.129.0.1:43258: EOF 2018/07/25 08:38:25 http: TLS handshake error from 10.129.0.1:43266: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:36:23.082896Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:36:53.867085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:36:53.873685Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:24.445263Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:37:24.445574Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:24.678728Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" level=info timestamp=2018-07-25T08:37:24.743061Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" level=info timestamp=2018-07-25T08:37:55.246638Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:37:55.254456Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:55.490739Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79" level=info timestamp=2018-07-25T08:37:55.537141Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79" level=info timestamp=2018-07-25T08:38:26.106080Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:38:26.106659Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:38:26.296268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixmcbj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixmcbj" level=info timestamp=2018-07-25T08:38:26.332085Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixmcbj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixmcbj" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:37:43.358158Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:37:43.467104Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:37:43.469107Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:37:43.482655Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-25T08:38:12.496416Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:38:13.690459Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-25T08:38:13.698161Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=Domain uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-25T08:38:14.031655Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:38:14.031935Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:38:14.036662Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:38:14.036820Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=Domain uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-25T08:38:14.059722Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:38:14.164703Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:38:14.167556Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:38:14.176889Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmi6h55w-nx9nz Pod phase: Running level=info timestamp=2018-07-25T08:37:10.687791Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:37:10.693863Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41" level=info timestamp=2018-07-25T08:37:10.694308Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:37:10.701027Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:10.909582Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:37:10.929211Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:10.933949Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:10.938588Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:37:10.958292Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:37:10.962395Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:10.963535Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:10.972866Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:11.053352Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:11.067341Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:11.698512Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41: 185" Pod name: virt-launcher-testvmi6km79-8xn4r Pod phase: Running level=info timestamp=2018-07-25T08:38:13.103601Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-25T08:38:13.669998Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:38:13.700071Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:38:13.938162Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:38:13.990978Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:38:13.993929Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 70db9c91-88c9-4ecc-a5d3-cc811872621c" level=info timestamp=2018-07-25T08:38:13.994991Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:38:13.995168Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:38:13.997037Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:38:14.038113Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:38:14.038418Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:38:14.056615Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:38:14.060399Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:38:14.175730Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:38:15.001648Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 70db9c91-88c9-4ecc-a5d3-cc811872621c: 191" Pod name: virt-launcher-testvmib4j5w-rklrj Pod phase: Running level=info timestamp=2018-07-25T08:37:41.731673Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-25T08:37:42.674191Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:37:42.688362Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 5fc814fb-5aab-49ee-a1a6-3669a47728f9" level=info timestamp=2018-07-25T08:37:42.691079Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:37:42.696457Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:43.228370Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:37:43.282414Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:43.286986Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:43.301378Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:37:43.325141Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:37:43.332546Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:43.338890Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:37:43.352188Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:37:43.475226Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:37:43.695103Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 5fc814fb-5aab-49ee-a1a6-3669a47728f9: 192" Pod name: virt-launcher-testvmiqpg74-7nqq9 Pod phase: Running level=info timestamp=2018-07-25T08:36:40.685439Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.116683Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:36:41.121091Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 630a6a10-c11f-4f2b-a798-c5bb91b507f6" level=info timestamp=2018-07-25T08:36:41.121397Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:36:41.137132Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.735662Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.748314Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:36:41.759442Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.789752Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:36:41.799852Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-25T08:36:41.803397Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:36:41.804935Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:36:41.810095Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:36:41.958112Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:36:42.125441Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 630a6a10-c11f-4f2b-a798-c5bb91b507f6: 179" Pod name: virt-launcher-testvmixmcbj-bxx8j Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [31.213 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose UDP service on a VMI
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166
    Expose ClusterIP UDP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:173
      Should expose a ClusterIP service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:177

      Expected error:
          <*errors.StatusError | 0xc420152c60>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      Timeout: request did not complete within allowed duration
      not to have occurred
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:36:37 http: TLS handshake error from 10.129.0.1:43510: EOF 2018/07/25 08:36:47 http: TLS handshake error from 10.129.0.1:43518: EOF 2018/07/25 08:36:57 http: TLS handshake error from 10.129.0.1:43526: EOF 2018/07/25 08:37:07 http: TLS handshake error from 10.129.0.1:43534: EOF 2018/07/25 08:37:17 http: TLS handshake error from 10.129.0.1:43542: EOF 2018/07/25 08:37:27 http: TLS handshake error from 10.129.0.1:43550: EOF 2018/07/25 08:37:37 http: TLS handshake error from 10.129.0.1:43558: EOF 2018/07/25 08:37:47 http: TLS handshake error from 10.129.0.1:43566: EOF 2018/07/25 08:37:57 http: TLS handshake error from 10.129.0.1:43574: EOF 2018/07/25 08:38:07 http: TLS handshake error from 10.129.0.1:43584: EOF 2018/07/25 08:38:17 http: TLS handshake error from 10.129.0.1:43592: EOF 2018/07/25 08:38:27 http: TLS handshake error from 10.129.0.1:43600: EOF 2018/07/25 08:38:37 http: TLS handshake error from 10.129.0.1:43608: EOF 2018/07/25 08:38:47 http: TLS handshake error from 10.129.0.1:43616: EOF 2018/07/25 08:38:57 http: TLS handshake error from 10.129.0.1:43624: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running level=info timestamp=2018-07-25T08:37:09.474094Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:15 http: TLS handshake error from 10.129.0.1:43208: EOF 2018/07/25 08:37:25 http: TLS handshake error from 10.129.0.1:43216:
EOF 2018/07/25 08:37:35 http: TLS handshake error from 10.129.0.1:43224: EOF level=info timestamp=2018-07-25T08:37:39.451977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:45 http: TLS handshake error from 10.129.0.1:43232: EOF 2018/07/25 08:37:55 http: TLS handshake error from 10.129.0.1:43240: EOF 2018/07/25 08:38:05 http: TLS handshake error from 10.129.0.1:43250: EOF level=info timestamp=2018-07-25T08:38:09.370461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:38:15 http: TLS handshake error from 10.129.0.1:43258: EOF 2018/07/25 08:38:25 http: TLS handshake error from 10.129.0.1:43266: EOF 2018/07/25 08:38:35 http: TLS handshake error from 10.129.0.1:43274: EOF level=info timestamp=2018-07-25T08:38:39.459117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:38:45 http: TLS handshake error from 10.129.0.1:43282: EOF 2018/07/25 08:38:55 http: TLS handshake error from 10.129.0.1:43290: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:37:24.445574Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:24.678728Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" level=info timestamp=2018-07-25T08:37:24.743061Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib4j5w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib4j5w" level=info timestamp=2018-07-25T08:37:55.246638Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:37:55.254456Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:37:55.490739Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79" level=info timestamp=2018-07-25T08:37:55.537141Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the 
object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79" level=info timestamp=2018-07-25T08:38:26.106080Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:38:26.106659Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:38:26.296268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixmcbj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixmcbj" level=info timestamp=2018-07-25T08:38:26.332085Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixmcbj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixmcbj" level=info timestamp=2018-07-25T08:38:57.282893Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:38:57.283701Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:38:57.521246Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik7jkl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik7jkl" level=info timestamp=2018-07-25T08:38:57.571045Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik7jkl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik7jkl" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:38:14.167556Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:38:14.176889Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-25T08:38:44.844225Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:38:45.265792Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-25T08:38:45.266103Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=Domain uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Shutoff reason Unknown" level=info timestamp=2018-07-25T08:38:46.287884Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:38:46.304978Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:38:46.308535Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:38:46.316181Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=Domain uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-25T08:38:46.317577Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:38:46.317699Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:38:46.343300Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:38:46.391772Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:38:46.391972Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:38:46.396550Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.885686Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-25T08:19:17.899675Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmi6h55w-nx9nz Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmi6km79-8xn4r Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmib4j5w-rklrj Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmik7jkl-vgdlr Pod phase: Pending Pod name: virt-launcher-testvmiqpg74-7nqq9 Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmixmcbj-bxx8j Pod phase: Running level=info timestamp=2018-07-25T08:38:45.254463Z pos=client.go:119 component=virt-launcher msg="domain status: 5:0" level=info timestamp=2018-07-25T08:38:45.266517Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:38:46.000108Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 0c05532b-f95f-47f9-a66d-b451a0a4ab66" level=info timestamp=2018-07-25T08:38:46.006080Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:38:46.207644Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:38:46.237890Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:38:46.248681Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Domain started."
level=info timestamp=2018-07-25T08:38:46.254829Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:38:46.310365Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:38:46.316577Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:38:46.316723Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:38:46.341888Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:38:46.343870Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:38:46.396003Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:38:47.013565Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 0c05532b-f95f-47f9-a66d-b451a0a4ab66: 193"

• Failure in Spec Setup (BeforeEach) [31.351 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose UDP service on a VMI
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166
    Expose NodePort UDP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:205
      Should expose a NodePort service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:210

      Expected error:
          <*errors.StatusError | 0xc420152630>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      Timeout: request did not complete within allowed duration
      not to have occurred
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:37:17 http: TLS handshake error from 10.129.0.1:43542: EOF 2018/07/25 08:37:27 http: TLS handshake error from 10.129.0.1:43550: EOF 2018/07/25 08:37:37 http: TLS handshake error from 10.129.0.1:43558: EOF 2018/07/25 08:37:47 http: TLS handshake error from 10.129.0.1:43566: EOF 2018/07/25 08:37:57 http: TLS handshake error from 10.129.0.1:43574: EOF 2018/07/25 08:38:07 http: TLS handshake error from 10.129.0.1:43584: EOF 2018/07/25 08:38:17 http: TLS handshake error from 10.129.0.1:43592: EOF 2018/07/25 08:38:27 http: TLS handshake error from 10.129.0.1:43600: EOF 2018/07/25 08:38:37 http: TLS handshake error from 10.129.0.1:43608: EOF 2018/07/25 08:38:47 http: TLS handshake error from 10.129.0.1:43616: EOF 2018/07/25 08:38:57 http: TLS handshake error from 10.129.0.1:43624: EOF 2018/07/25 08:39:07 http: TLS handshake error from 10.129.0.1:43632: EOF level=info timestamp=2018-07-25T08:39:09.280271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:39:17 http: TLS
handshake error from 10.129.0.1:43640: EOF 2018/07/25 08:39:27 http: TLS handshake error from 10.129.0.1:43648: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running 2018/07/25 08:37:35 http: TLS handshake error from 10.129.0.1:43224: EOF level=info timestamp=2018-07-25T08:37:39.451977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:37:45 http: TLS handshake error from 10.129.0.1:43232: EOF 2018/07/25 08:37:55 http: TLS handshake error from 10.129.0.1:43240: EOF 2018/07/25 08:38:05 http: TLS handshake error from 10.129.0.1:43250: EOF level=info timestamp=2018-07-25T08:38:09.370461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:38:15 http: TLS handshake error from 10.129.0.1:43258: EOF 2018/07/25 08:38:25 http: TLS handshake error from 10.129.0.1:43266: EOF 2018/07/25 08:38:35 http: TLS handshake error from 10.129.0.1:43274: EOF level=info timestamp=2018-07-25T08:38:39.459117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:38:45 http: TLS handshake error from 10.129.0.1:43282: EOF 2018/07/25 08:38:55 http: TLS handshake error from 10.129.0.1:43290: EOF 2018/07/25 08:39:05 http: TLS handshake error from 10.129.0.1:43298: EOF 2018/07/25 08:39:15 http: TLS handshake error from 10.129.0.1:43306: EOF 2018/07/25 08:39:25 http: TLS handshake error from 10.129.0.1:43314: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running [log output identical to the previous dump; omitted] Pod name: virt-handler-cqcks Pod phase: Running [log output identical to the previous dump; omitted]
Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-25T08:39:14.624807Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:39:15.530752Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-25T08:39:15.535829Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-25T08:39:16.015829Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:39:16.023708Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-25T08:39:16.165492Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-25T08:39:16.178652Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:39:16.178832Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="No update processing required" level=info timestamp=2018-07-25T08:39:16.680698Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmi6h55w-nx9nz Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmi6km79-8xn4r Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmib4j5w-rklrj Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmik7jkl-vgdlr Pod phase: Running level=info timestamp=2018-07-25T08:39:14.851731Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-25T08:39:15.506625Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-25T08:39:15.536847Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:39:15.846382Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 1735a816-1a06-4f82-8b99-b1bcabe10d81" level=info timestamp=2018-07-25T08:39:15.853332Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-25T08:39:15.946152Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-25T08:39:16.006018Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:39:16.017007Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:39:16.038831Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-25T08:39:16.047372Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain started." level=info timestamp=2018-07-25T08:39:16.051355Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:39:16.079171Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-25T08:39:16.166565Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-25T08:39:16.685473Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-25T08:39:16.859575Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 1735a816-1a06-4f82-8b99-b1bcabe10d81: 191" Pod name: virt-launcher-testvmiqpg74-7nqq9 Pod phase: Running [log output identical to the first dump above; omitted] Pod name: virt-launcher-testvmixmcbj-bxx8j Pod phase: Running [log output identical to the previous dump; omitted]
level=info timestamp=2018-07-25T08:38:46.254829Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:46.310365Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:46.316577Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:46.316723Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-25T08:38:46.341888Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:38:46.343870Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:46.396003Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:47.013565Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 0c05532b-f95f-47f9-a66d-b451a0a4ab66: 193"

• Failure in Spec Setup (BeforeEach) [31.256 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VMI replica set
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:253
    Expose ClusterIP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:286
      Should create a ClusterIP service on VMRS and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:290

      Expected error:
          <*errors.StatusError | 0xc420153560>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:264
------------------------------
STEP: Creating a VMRS object with 2 replicas
STEP: Start the replica set
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:37:57 http: TLS handshake error from 10.129.0.1:43574: EOF
2018/07/25 08:38:07 http: TLS handshake error from 10.129.0.1:43584: EOF
2018/07/25 08:38:17 http: TLS handshake error from 10.129.0.1:43592: EOF
2018/07/25 08:38:27 http: TLS handshake error from 10.129.0.1:43600: EOF
2018/07/25 08:38:37 http: TLS handshake error from 10.129.0.1:43608: EOF
2018/07/25 08:38:47 http: TLS handshake error from 10.129.0.1:43616: EOF
2018/07/25 08:38:57 http: TLS handshake error from 10.129.0.1:43624: EOF
2018/07/25 08:39:07 http: TLS handshake error from 10.129.0.1:43632: EOF
level=info timestamp=2018-07-25T08:39:09.280271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:39:17 http: TLS handshake error from 10.129.0.1:43640: EOF
2018/07/25 08:39:27 http: TLS handshake error from 10.129.0.1:43648: EOF
2018/07/25 08:39:37 http: TLS handshake error from 10.129.0.1:43656: EOF
level=info timestamp=2018-07-25T08:39:39.266694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:39:47 http: TLS handshake error from 10.129.0.1:43664: EOF
2018/07/25 08:39:57 http: TLS handshake error from 10.129.0.1:43672: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:37:55 http: TLS handshake error from 10.129.0.1:43240: EOF
2018/07/25 08:38:05 http: TLS handshake error from 10.129.0.1:43250: EOF
level=info timestamp=2018-07-25T08:38:09.370461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:38:15 http: TLS handshake error from 10.129.0.1:43258: EOF
2018/07/25 08:38:25 http: TLS handshake error from 10.129.0.1:43266: EOF
2018/07/25 08:38:35 http: TLS handshake error from 10.129.0.1:43274: EOF
level=info timestamp=2018-07-25T08:38:39.459117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:38:45 http: TLS handshake error from 10.129.0.1:43282: EOF
2018/07/25 08:38:55 http: TLS handshake error from 10.129.0.1:43290: EOF
2018/07/25 08:39:05 http: TLS handshake error from 10.129.0.1:43298: EOF
2018/07/25 08:39:15 http: TLS handshake error from 10.129.0.1:43306: EOF
2018/07/25 08:39:25 http: TLS handshake error from 10.129.0.1:43314: EOF
2018/07/25 08:39:35 http: TLS handshake error from 10.129.0.1:43322: EOF
2018/07/25 08:39:45 http: TLS handshake error from 10.129.0.1:43330: EOF
2018/07/25 08:39:55 http: TLS handshake error from 10.129.0.1:43338: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:37:55.254456Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:37:55.490739Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79"
level=info timestamp=2018-07-25T08:37:55.537141Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6km79\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6km79"
level=info timestamp=2018-07-25T08:38:26.106080Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:38:26.106659Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:38:26.296268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixmcbj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixmcbj"
level=info timestamp=2018-07-25T08:38:26.332085Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixmcbj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixmcbj"
level=info timestamp=2018-07-25T08:38:57.282893Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:38:57.283701Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:38:57.521246Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik7jkl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik7jkl"
level=info timestamp=2018-07-25T08:38:57.571045Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik7jkl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik7jkl"
level=error timestamp=2018-07-25T08:39:58.788515Z pos=replicaset.go:225 component=virt-controller service=http namespace=kubevirt-test-default name=replicasetl6cqb kind= uid=2c782674-8fe6-11e8-bb2b-525500d15501 msg="Scaling the replicaset failed."
level=info timestamp=2018-07-25T08:39:59.925611Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitgfzg kind= uid=3f18c5dd-8fe6-11e8-bb2b-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-25T08:39:59.926032Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitgfzg kind= uid=3f18c5dd-8fe6-11e8-bb2b-525500d15501 msg="Creating or the VirtualMachineInstance: false"
level=info timestamp=2018-07-25T08:39:59.926135Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil"
Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:38:14.167556Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:38:14.176889Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:38:44.844225Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:38:45.265792Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED"
level=info timestamp=2018-07-25T08:38:45.266103Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=Domain uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Shutoff reason Unknown"
level=info timestamp=2018-07-25T08:38:46.287884Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:38:46.304978Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:38:46.308535Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-25T08:38:46.316181Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=Domain uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-25T08:38:46.317577Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:38:46.317699Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-25T08:38:46.343300Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-25T08:38:46.391772Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:38:46.391972Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:38:46.396550Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
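The virt-controller entries above repeatedly hit the apiserver's optimistic-concurrency check ("the object has been modified; please apply your changes to the latest version and try again") and recover by reenqueuing the VMI. For one-shot status updates, client-go also ships a standard helper for this pattern. A minimal sketch follows; the KubevirtClient accessor signatures and the annotation are illustrative assumptions, not the actual code paths from vmi.go in the log:

```go
package example

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/util/retry"

	"kubevirt.io/kubevirt/pkg/kubecli" // assumed import path for this era of KubeVirt
)

func markInitialized(client kubecli.KubevirtClient, namespace, name string) error {
	// RetryOnConflict re-runs the closure whenever Update returns a
	// 409 Conflict, refetching the latest resourceVersion each time.
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		vmi, err := client.VirtualMachineInstance(namespace).Get(name, &metav1.GetOptions{})
		if err != nil {
			return err
		}
		if vmi.Annotations == nil {
			vmi.Annotations = map[string]string{}
		}
		vmi.Annotations["example.kubevirt.io/initialized"] = "true" // hypothetical marker
		_, err = client.VirtualMachineInstance(namespace).Update(vmi)
		return err
	})
}
```

The reenqueue messages here are benign for exactly this reason: either strategy converges once the writer observes the newest resourceVersion.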
Pod name: virt-handler-swxsf
Pod phase: Running
level=info timestamp=2018-07-25T08:19:17.910064Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-25T08:19:18.000155Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-25T08:19:18.072641Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-07-25T08:19:18.087721Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
level=info timestamp=2018-07-25T08:39:14.624807Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:39:15.530752Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED"
level=info timestamp=2018-07-25T08:39:15.535829Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Paused reason StartingUp"
level=info timestamp=2018-07-25T08:39:16.015829Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-25T08:39:16.023708Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-25T08:39:16.165492Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-25T08:39:16.178652Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:39:16.178832Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-25T08:39:16.680698Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
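The terse virt-launcher lines "domain status: 3:11", "1:1", and "5:0" pair up with the virt-handler prose above ("Paused reason StartingUp", "Running", "Shutoff reason Unknown"): the two numbers are libvirt's virDomainState and its state-specific reason enum. A small self-contained decoder, covering only the pairs that appear in this run:

```go
package main

import "fmt"

// virDomainState values seen in this log (libvirt public enums).
var state = map[int]string{1: "Running", 3: "Paused", 5: "Shutoff"}

// Per-state reason enums, again only the values this log contains:
// virDomainRunningReason, virDomainPausedReason, virDomainShutoffReason.
var reason = map[int]map[int]string{
	1: {1: "Booted"},
	3: {11: "StartingUp"},
	5: {0: "Unknown"},
}

func decode(s, r int) string {
	return fmt.Sprintf("%s/%s", state[s], reason[s][r])
}

func main() {
	fmt.Println(decode(3, 11)) // Paused/StartingUp  -> "domain status: 3:11"
	fmt.Println(decode(1, 1))  // Running/Booted     -> "domain status: 1:1"
	fmt.Println(decode(5, 0))  // Shutoff/Unknown    -> "domain status: 5:0"
}
```

So the launcher logs trace the normal boot sequence (defined, paused while starting up, then running); none of the VMIs below fail at the libvirt layer.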
Pod name: virt-launcher-testvmi6h55w-nx9nz
Pod phase: Running
level=info timestamp=2018-07-25T08:37:10.687791Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-25T08:37:10.693863Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41"
level=info timestamp=2018-07-25T08:37:10.694308Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-25T08:37:10.701027Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:37:10.909582Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-25T08:37:10.929211Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:37:10.933949Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:37:10.938588Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-25T08:37:10.958292Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Domain started."
level=info timestamp=2018-07-25T08:37:10.962395Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:37:10.963535Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:37:10.972866Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:37:11.053352Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:37:11.067341Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h55w kind= uid=d031725b-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:37:11.698512Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 8d06c6b5-f9dd-4bb3-bba0-de2a8f827a41: 185"
Pod name: virt-launcher-testvmi6km79-8xn4r
Pod phase: Running
level=info timestamp=2018-07-25T08:38:13.103601Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-25T08:38:13.669998Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-25T08:38:13.700071Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:13.938162Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-25T08:38:13.990978Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:38:13.993929Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 70db9c91-88c9-4ecc-a5d3-cc811872621c"
level=info timestamp=2018-07-25T08:38:13.994991Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-25T08:38:13.995168Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Domain started."
level=info timestamp=2018-07-25T08:38:13.997037Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:14.038113Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:14.038418Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-25T08:38:14.056615Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:38:14.060399Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:14.175730Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6km79 kind= uid=f4c90fe5-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:15.001648Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 70db9c91-88c9-4ecc-a5d3-cc811872621c: 191"
Pod name: virt-launcher-testvmib4j5w-rklrj
Pod phase: Running
level=info timestamp=2018-07-25T08:37:41.731673Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-25T08:37:42.674191Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-25T08:37:42.688362Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 5fc814fb-5aab-49ee-a1a6-3669a47728f9"
level=info timestamp=2018-07-25T08:37:42.691079Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-25T08:37:42.696457Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:37:43.228370Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-25T08:37:43.282414Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:37:43.286986Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:37:43.301378Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-25T08:37:43.325141Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Domain started."
level=info timestamp=2018-07-25T08:37:43.332546Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:37:43.338890Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:37:43.352188Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:37:43.475226Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib4j5w kind= uid=e271363f-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:37:43.695103Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 5fc814fb-5aab-49ee-a1a6-3669a47728f9: 192"
Pod name: virt-launcher-testvmik7jkl-vgdlr
Pod phase: Running
level=info timestamp=2018-07-25T08:39:14.851731Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-25T08:39:15.506625Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-25T08:39:15.536847Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:39:15.846382Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 1735a816-1a06-4f82-8b99-b1bcabe10d81"
level=info timestamp=2018-07-25T08:39:15.853332Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-25T08:39:15.946152Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-25T08:39:16.006018Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:39:16.017007Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:39:16.038831Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-25T08:39:16.047372Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Domain started."
level=info timestamp=2018-07-25T08:39:16.051355Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:39:16.079171Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:39:16.166565Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:39:16.685473Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:39:16.859575Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 1735a816-1a06-4f82-8b99-b1bcabe10d81: 191"
Pod name: virt-launcher-testvmiqpg74-7nqq9
Pod phase: Running
level=info timestamp=2018-07-25T08:36:40.685439Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-25T08:36:41.116683Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-25T08:36:41.121091Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 630a6a10-c11f-4f2b-a798-c5bb91b507f6"
level=info timestamp=2018-07-25T08:36:41.121397Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-25T08:36:41.137132Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:36:41.735662Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-25T08:36:41.748314Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:36:41.759442Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:36:41.789752Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-25T08:36:41.799852Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Domain started."
level=info timestamp=2018-07-25T08:36:41.803397Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:41.804935Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:36:41.810095Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:36:41.958112Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:36:42.125441Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 630a6a10-c11f-4f2b-a798-c5bb91b507f6: 179"
Pod name: virt-launcher-testvmixmcbj-bxx8j
Pod phase: Running
level=info timestamp=2018-07-25T08:38:45.254463Z pos=client.go:119 component=virt-launcher msg="domain status: 5:0"
level=info timestamp=2018-07-25T08:38:45.266517Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:46.000108Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 0c05532b-f95f-47f9-a66d-b451a0a4ab66"
level=info timestamp=2018-07-25T08:38:46.006080Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-25T08:38:46.207644Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-25T08:38:46.237890Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:38:46.248681Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Domain started."
level=info timestamp=2018-07-25T08:38:46.254829Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:46.310365Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:46.316577Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:46.316723Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-25T08:38:46.341888Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-25T08:38:46.343870Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-25T08:38:46.396003Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-25T08:38:47.013565Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 0c05532b-f95f-47f9-a66d-b451a0a4ab66: 193"

• Failure in Spec Setup (BeforeEach) [31.313 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on an VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:318
    Expose ClusterIP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:362
      Connect to ClusterIP services that was set when VM was offline
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:363

      Expected error:
          <*errors.StatusError | 0xc42083c480>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:331
------------------------------
STEP: Creating an VM object
STEP: Creating the VM
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:38:37 http: TLS handshake error from 10.129.0.1:43608: EOF
2018/07/25 08:38:47 http: TLS handshake error from 10.129.0.1:43616: EOF
2018/07/25 08:38:57 http: TLS handshake error from 10.129.0.1:43624: EOF
2018/07/25 08:39:07 http: TLS handshake error from 10.129.0.1:43632: EOF
level=info timestamp=2018-07-25T08:39:09.280271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:39:17 http: TLS handshake error from 10.129.0.1:43640: EOF
2018/07/25 08:39:27 http: TLS handshake error from 10.129.0.1:43648: EOF
2018/07/25 08:39:37 http: TLS handshake error from 10.129.0.1:43656: EOF
level=info timestamp=2018-07-25T08:39:39.266694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:39:47 http: TLS handshake error from 10.129.0.1:43664: EOF
2018/07/25 08:39:57 http: TLS handshake error from 10.129.0.1:43672: EOF
2018/07/25 08:40:07 http: TLS handshake error from 10.129.0.1:43680: EOF
level=info timestamp=2018-07-25T08:40:09.385496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:40:17 http: TLS handshake error from 10.129.0.1:43688: EOF
2018/07/25 08:40:27 http: TLS handshake error from 10.129.0.1:43696: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:38:15 http: TLS handshake error from 10.129.0.1:43258: EOF
2018/07/25 08:38:25 http: TLS handshake error from 10.129.0.1:43266: EOF
2018/07/25 08:38:35 http: TLS handshake error from 10.129.0.1:43274: EOF
level=info timestamp=2018-07-25T08:38:39.459117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:38:45 http: TLS handshake error from 10.129.0.1:43282: EOF
2018/07/25 08:38:55 http: TLS handshake error from 10.129.0.1:43290: EOF
2018/07/25 08:39:05 http: TLS handshake error from 10.129.0.1:43298: EOF
2018/07/25 08:39:15 http: TLS handshake error from 10.129.0.1:43306: EOF
2018/07/25 08:39:25 http: TLS handshake error from 10.129.0.1:43314: EOF
2018/07/25 08:39:35 http: TLS handshake error from 10.129.0.1:43322: EOF
2018/07/25 08:39:45 http: TLS handshake error from 10.129.0.1:43330: EOF
2018/07/25 08:39:55 http: TLS handshake error from 10.129.0.1:43338: EOF
2018/07/25 08:40:05 http: TLS handshake error from 10.129.0.1:43346: EOF
2018/07/25 08:40:15 http: TLS handshake error from 10.129.0.1:43354: EOF
2018/07/25 08:40:25 http: TLS handshake error from 10.129.0.1:43362: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:40:03.874541Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8ds2cd kind= uid=3e71a5a2-8fe6-11e8-bb2b-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-25T08:40:03.874812Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8ds2cd kind= uid=3e71a5a2-8fe6-11e8-bb2b-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmimzkj8ds2cd"
level=info timestamp=2018-07-25T08:40:03.874948Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8ds2cd kind= uid=3e71a5a2-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:40:03.875140Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8ds2cd kind= uid=3e71a5a2-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:40:03.889402Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8d22fm kind= uid=3e7271c4-8fe6-11e8-bb2b-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-25T08:40:03.889484Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8d22fm kind= uid=3e7271c4-8fe6-11e8-bb2b-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmimzkj8d22fm"
level=info timestamp=2018-07-25T08:40:03.889568Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8d22fm kind= uid=3e7271c4-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:40:03.889627Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8d22fm kind= uid=3e7271c4-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:40:04.016571Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj8ds2cd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8ds2cd"
level=info timestamp=2018-07-25T08:40:04.065981Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj8d22fm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8d22fm"
level=info timestamp=2018-07-25T08:40:04.262804Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj8ds2cd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8ds2cd"
level=error timestamp=2018-07-25T08:40:28.943183Z pos=replicaset.go:225 component=virt-controller service=http namespace=kubevirt-test-default name=replicasetl6cqb kind= uid=2c782674-8fe6-11e8-bb2b-525500d15501 msg="Scaling the replicaset failed."
level=info timestamp=2018-07-25T08:40:32.596367Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskr8x kind= uid=5291108d-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:40:32.596840Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskr8x kind= uid=5291108d-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:40:32.826383Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiskr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiskr8x"
Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:40:02.777998Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.778126Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797192Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797346Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.797555Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.802503Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.802683Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.861889Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.862133Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.891552Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.891700Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.935696Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.936112Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.959368Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.959702Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-swxsf
Pod phase: Running
level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.661562Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.664291Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:01.672691Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmik7jkl"
level=info timestamp=2018-07-25T08:40:01.920465Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-25T08:40:01.920795Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-25T08:40:01.925608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.925743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.925779Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:02.109245Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.181179Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.181887Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.377832Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.380781Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
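The shutdown sequence above is routine test teardown; the failures themselves all wrap the same *errors.StatusError with Reason "Timeout" and Code 504, i.e. the apiserver did not answer within its request budget. A test helper could classify these with apimachinery's predicate instead of string-matching the message. A sketch, where createVMI stands in for whatever call timed out (not the actual suite code):

```go
package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
)

// createWithDiagnostics annotates apiserver timeouts so a 504 reads as a
// control-plane problem rather than a test-logic bug.
func createWithDiagnostics(createVMI func() error) error {
	err := createVMI()
	if apierrors.IsTimeout(err) {
		return fmt.Errorf("apiserver timeout (HTTP 504), check control plane health: %v", err)
	}
	return err
}

func main() {
	// Simulate the error shape seen in the failure dumps above.
	fake := func() error {
		return apierrors.NewTimeoutError("request did not complete within allowed duration", 0)
	}
	fmt.Println(createWithDiagnostics(fake))
}
```

The point of the predicate is that it keys on the machine-readable Reason field, so it keeps working if the human-readable message changes between Kubernetes releases.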
Pod name: virt-launcher-testvmiskr8x-xhklm
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [31.866 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc42083ccf0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
level=info timestamp=2018-07-25T08:39:09.280271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:39:17 http: TLS handshake error from 10.129.0.1:43640: EOF
2018/07/25 08:39:27 http: TLS handshake error from 10.129.0.1:43648: EOF
2018/07/25 08:39:37 http: TLS handshake error from 10.129.0.1:43656: EOF
level=info timestamp=2018-07-25T08:39:39.266694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:39:47 http: TLS handshake error from 10.129.0.1:43664: EOF
2018/07/25 08:39:57 http: TLS handshake error from 10.129.0.1:43672: EOF
2018/07/25 08:40:07 http: TLS handshake error from 10.129.0.1:43680: EOF
level=info timestamp=2018-07-25T08:40:09.385496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:40:17 http: TLS handshake error from 10.129.0.1:43688: EOF
2018/07/25 08:40:27 http: TLS handshake error from 10.129.0.1:43696: EOF
2018/07/25 08:40:37 http: TLS handshake error from 10.129.0.1:43704: EOF
level=info timestamp=2018-07-25T08:40:39.219574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:40:47 http: TLS handshake error from 10.129.0.1:43712: EOF
2018/07/25 08:40:57 http: TLS handshake error from 10.129.0.1:43720: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
level=info timestamp=2018-07-25T08:38:39.459117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:38:45 http: TLS handshake error from 10.129.0.1:43282: EOF
2018/07/25 08:38:55 http: TLS handshake error from 10.129.0.1:43290: EOF
2018/07/25 08:39:05 http: TLS handshake error from 10.129.0.1:43298: EOF
2018/07/25 08:39:15 http: TLS handshake error from 10.129.0.1:43306: EOF
2018/07/25 08:39:25 http: TLS handshake error from 10.129.0.1:43314: EOF
2018/07/25 08:39:35 http: TLS handshake error from 10.129.0.1:43322: EOF
2018/07/25 08:39:45 http: TLS handshake error from 10.129.0.1:43330: EOF
2018/07/25 08:39:55 http: TLS handshake error from 10.129.0.1:43338: EOF
2018/07/25 08:40:05 http: TLS handshake error from 10.129.0.1:43346: EOF
2018/07/25 08:40:15 http: TLS handshake error from 10.129.0.1:43354: EOF
2018/07/25 08:40:25 http: TLS handshake error from 10.129.0.1:43362: EOF
2018/07/25 08:40:35 http: TLS handshake error from 10.129.0.1:43370: EOF
2018/07/25 08:40:45 http: TLS handshake error from 10.129.0.1:43378: EOF
2018/07/25 08:40:55 http: TLS handshake error from 10.129.0.1:43386: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:40:46.161435Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj86c2wn kind= uid=5068ff56-8fe6-11e8-bb2b-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-25T08:40:46.183319Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj86c2wn kind= uid=5068ff56-8fe6-11e8-bb2b-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmimzkj86c2wn"
level=info timestamp=2018-07-25T08:40:46.161821Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj86c2wn kind= uid=5068ff56-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:40:46.186131Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj86c2wn kind= uid=5068ff56-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:40:46.205553Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-25T08:40:46.205741Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmimzkj8jfshg"
level=info timestamp=2018-07-25T08:40:46.205925Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:40:46.208698Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:40:46.469080Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj8jfshg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8jfshg"
level=info timestamp=2018-07-25T08:40:46.482793Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj86c2wn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj86c2wn"
level=info timestamp=2018-07-25T08:40:46.628732Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj8jfshg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8jfshg"
level=info timestamp=2018-07-25T08:40:46.630252Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj86c2wn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj86c2wn"
level=info timestamp=2018-07-25T08:41:03.289429Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6279g kind= uid=64e1b38f-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:41:03.297069Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6279g kind= uid=64e1b38f-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:41:03.604907Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6279g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6279g"
Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:40:02.777998Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.778126Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797192Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797346Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.797555Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.802503Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.802683Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.861889Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.862133Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.891552Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.891700Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.935696Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.936112Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.959368Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.959702Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-swxsf
Pod phase: Running
level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.661562Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.664291Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:01.672691Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmik7jkl"
level=info timestamp=2018-07-25T08:40:01.920465Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-25T08:40:01.920795Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-25T08:40:01.925608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.925743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.925779Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:02.109245Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.181179Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.181887Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.377832Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.380781Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi6279g-hrpls
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.761 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance with pod network connectivity explicitly set
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc42083d3b0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------
Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:39:37 http: TLS handshake error from 10.129.0.1:43656: EOF
level=info timestamp=2018-07-25T08:39:39.266694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:39:47 http: TLS handshake error from 10.129.0.1:43664: EOF
2018/07/25 08:39:57 http: TLS handshake error from 10.129.0.1:43672: EOF
2018/07/25 08:40:07 http: TLS handshake error from 10.129.0.1:43680: EOF
level=info timestamp=2018-07-25T08:40:09.385496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:40:17 http: TLS handshake error from 10.129.0.1:43688: EOF
2018/07/25 08:40:27 http: TLS handshake error from 10.129.0.1:43696: EOF
2018/07/25 08:40:37 http: TLS handshake error from 10.129.0.1:43704: EOF
level=info timestamp=2018-07-25T08:40:39.219574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:40:47 http: TLS handshake error from 10.129.0.1:43712: EOF
2018/07/25 08:40:57 http: TLS handshake error from 10.129.0.1:43720: EOF
2018/07/25 08:41:07 http: TLS handshake error from 10.129.0.1:43730: EOF
2018/07/25 08:41:17 http: TLS handshake error from 10.129.0.1:43738: EOF
2018/07/25 08:41:27 http: TLS handshake error from 10.129.0.1:43746: EOF
Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:39:15 http: TLS handshake error from 10.129.0.1:43306: EOF
2018/07/25 08:39:25 http: TLS handshake error from 10.129.0.1:43314: EOF
2018/07/25 08:39:35 http: TLS handshake error from 10.129.0.1:43322: EOF
2018/07/25 08:39:45 http: TLS handshake error from 10.129.0.1:43330: EOF
2018/07/25 08:39:55 http: TLS handshake error from 10.129.0.1:43338: EOF
2018/07/25 08:40:05 http: TLS handshake error from 10.129.0.1:43346: EOF
2018/07/25 08:40:15 http: TLS handshake error from 10.129.0.1:43354: EOF
2018/07/25 08:40:25 http: TLS handshake error from 10.129.0.1:43362: EOF
2018/07/25 08:40:35 http: TLS handshake error from 10.129.0.1:43370: EOF
2018/07/25 08:40:45 http: TLS handshake error from 10.129.0.1:43378: EOF
2018/07/25 08:40:55 http: TLS handshake error from 10.129.0.1:43386: EOF
2018/07/25 08:41:05 http: TLS handshake error from 10.129.0.1:43396: EOF
level=info timestamp=2018-07-25T08:41:09.330688Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:41:15 http: TLS handshake error from 10.129.0.1:43404: EOF
2018/07/25 08:41:25 http: TLS handshake error from 10.129.0.1:43412: EOF
Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:40:46.205553Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-25T08:40:46.205741Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmimzkj8jfshg"
level=info timestamp=2018-07-25T08:40:46.205925Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:40:46.208698Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:40:46.469080Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on
virtualmachineinstances.kubevirt.io \"testvmimzkj8jfshg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8jfshg" (entries from 08:40:46.482793 through 08:41:03.604907 repeat the previous dump) level=info timestamp=2018-07-25T08:41:34.077366Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4rtj kind= uid=773909ea-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:41:34.078070Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4rtj kind= uid=773909ea-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:41:34.423623Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj" level=info timestamp=2018-07-25T08:41:34.470184Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj"
Pod name: virt-handler-cqcks Pod phase: Running (log unchanged since 08:40:02; identical to the previous dump)
Pod name: virt-handler-swxsf Pod phase: Running (log unchanged since 08:40:02; identical to the previous dump)
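
The recurring "reenqueuing VirtualMachineInstance" entries in the virt-controller log above are ordinary optimistic-concurrency conflicts: an update raced with another writer of the same object, the apiserver rejected it with 409 Conflict, and the controller requeued the key to retry against the latest resourceVersion. As a minimal sketch (not KubeVirt's actual controller code), this is how such a conflict is typically detected and retried with client-go; updateVMIStatus is a hypothetical stand-in for whatever mutation is being applied:

package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/client-go/util/retry"
)

// updateVMIStatus is a hypothetical stand-in for the controller's mutation;
// on every attempt it must GET the latest object, mutate it, and PUT it back,
// so each retry is based on the current resourceVersion.
func updateVMIStatus() error {
	// get latest VMI, mutate, update ... (omitted in this sketch)
	return nil
}

func main() {
	// RetryOnConflict re-runs the function only while the returned error is a
	// 409 Conflict ("the object has been modified ..."), backing off between tries.
	if err := retry.RetryOnConflict(retry.DefaultRetry, updateVMIStatus); err != nil {
		if apierrors.IsConflict(err) {
			fmt.Println("still conflicting after retries; requeue the key")
			return
		}
		fmt.Println("update failed:", err)
	}
}

The essential point is that the function passed to RetryOnConflict must re-read the object on every attempt; otherwise each retry resubmits the same stale resourceVersion and fails identically, which is exactly the reenqueue loop visible in the log.
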
Pod name: virt-launcher-testvmid4rtj-bvrd4 Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.841 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with custom MAC address /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.StatusError | 0xc420b942d0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146 ------------------------------ Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:40:07 http: TLS handshake error from 10.129.0.1:43680: EOF level=info timestamp=2018-07-25T08:40:09.385496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:40:17 http: TLS handshake error from 10.129.0.1:43688: EOF 2018/07/25 08:40:27 http: TLS handshake error from 10.129.0.1:43696: EOF 2018/07/25 08:40:37 http: TLS handshake error from 10.129.0.1:43704: EOF level=info timestamp=2018-07-25T08:40:39.219574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:40:47 http: TLS handshake error from 10.129.0.1:43712: EOF 2018/07/25 08:40:57 http: TLS handshake error from 10.129.0.1:43720: EOF 2018/07/25 08:41:07 http: TLS handshake error from 10.129.0.1:43730: EOF 2018/07/25 08:41:17 http: TLS handshake error from 10.129.0.1:43738: EOF 2018/07/25 08:41:27 http: TLS handshake error from 10.129.0.1:43746: EOF 2018/07/25 08:41:37 http: TLS handshake error from 10.129.0.1:43754: EOF level=info timestamp=2018-07-25T08:41:39.266273Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:41:47 http: TLS handshake error from 10.129.0.1:43762: EOF 2018/07/25 08:41:57 http: TLS handshake error from 10.129.0.1:43770: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running 2018/07/25 08:39:55 http: TLS handshake error from 10.129.0.1:43338: EOF 2018/07/25 08:40:05 http: TLS handshake error from 10.129.0.1:43346: EOF 2018/07/25 08:40:15 http: TLS handshake error from 10.129.0.1:43354: EOF 2018/07/25 08:40:25 http: TLS handshake error from 10.129.0.1:43362: EOF 2018/07/25 08:40:35 http: TLS handshake error from 10.129.0.1:43370: EOF 2018/07/25 08:40:45 http: TLS handshake error from 10.129.0.1:43378: EOF 2018/07/25 08:40:55 http: TLS handshake error from 10.129.0.1:43386: EOF 2018/07/25 08:41:05 http: TLS handshake error from 10.129.0.1:43396: EOF level=info timestamp=2018-07-25T08:41:09.330688Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 
08:41:15 http: TLS handshake error from 10.129.0.1:43404: EOF 2018/07/25 08:41:25 http: TLS handshake error from 10.129.0.1:43412: EOF 2018/07/25 08:41:35 http: TLS handshake error from 10.129.0.1:43420: EOF 2018/07/25 08:41:45 http: TLS handshake error from 10.129.0.1:43428: EOF 2018/07/25 08:41:55 http: TLS handshake error from 10.129.0.1:43436: EOF 2018/07/25 08:42:05 http: TLS handshake error from 10.129.0.1:43444: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:40:46.208698Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimzkj8jfshg kind= uid=5069b25c-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:40:46.469080Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj8jfshg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8jfshg" level=info timestamp=2018-07-25T08:40:46.482793Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj86c2wn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj86c2wn" level=info timestamp=2018-07-25T08:40:46.628732Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj8jfshg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj8jfshg" level=info timestamp=2018-07-25T08:40:46.630252Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimzkj86c2wn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimzkj86c2wn" level=info timestamp=2018-07-25T08:41:03.289429Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6279g kind= uid=64e1b38f-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:41:03.297069Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6279g kind= uid=64e1b38f-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:41:03.604907Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6279g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6279g" level=info timestamp=2018-07-25T08:41:34.077366Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4rtj kind= uid=773909ea-8fe6-11e8-bb2b-525500d15501 
msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:41:34.078070Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4rtj kind= uid=773909ea-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:41:34.423623Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj" level=info timestamp=2018-07-25T08:41:34.470184Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj" level=info timestamp=2018-07-25T08:41:34.751994Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmid4rtj, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 773909ea-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj" level=info timestamp=2018-07-25T08:42:04.925553Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminsgmd kind= uid=899a44bc-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:42:04.929791Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminsgmd kind= uid=899a44bc-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:40:02.777998Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.778126Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.797192Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.797346Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.797555Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.802503Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-25T08:40:02.802683Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.861889Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.862133Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.891552Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.891700Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.935696Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.936112Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.959368Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.959702Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:01.661562Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-25T08:40:01.664291Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown." 
level=info timestamp=2018-07-25T08:40:01.672691Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmik7jkl" level=info timestamp=2018-07-25T08:40:01.920465Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-25T08:40:01.920795Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-25T08:40:01.925608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:01.925743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-25T08:40:01.925779Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-25T08:40:02.109245Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.181179Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.181887Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.377832Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.380781Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
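
Every spec in this run fails the same way: the test's request to the apiserver comes back as a *errors.StatusError with Status "Failure", Reason "Timeout" and Code 504, so the launcher pod never leaves Pending within the ~30-second setup window. A minimal sketch, assuming the error comes from some client call, of telling this timeout apart from other apiserver errors; the error value here is constructed by hand to mirror the failures above:

package main

import (
	"fmt"
	"net/http"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// err stands in for the result of a client call (e.g. creating a VMI);
	// the fields mirror the StatusError printed in the failures above.
	var err error = &apierrors.StatusError{ErrStatus: metav1.Status{
		Status:  metav1.StatusFailure,
		Message: "Timeout: request did not complete within allowed duration",
		Reason:  metav1.StatusReasonTimeout,
		Code:    http.StatusGatewayTimeout, // 504
	}}

	// IsTimeout matches exactly this class of apiserver response, letting a
	// caller back off and retry instead of treating it as a hard failure.
	if apierrors.IsTimeout(err) {
		fmt.Println("apiserver timeout (504); back off and retry")
	}
	fmt.Println("reason:", apierrors.ReasonForError(err))
}
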
Pod name: virt-launcher-testvminsgmd-f9wbc Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.694 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the internet /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.StatusError | 0xc420153440>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146 ------------------------------ Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:40:27 http: TLS handshake error from 10.129.0.1:43696: EOF 2018/07/25 08:40:37 http: TLS handshake error from 10.129.0.1:43704: EOF level=info timestamp=2018-07-25T08:40:39.219574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:40:47 http: TLS handshake error from 10.129.0.1:43712: EOF 2018/07/25 08:40:57 http: TLS handshake error from 10.129.0.1:43720: EOF 2018/07/25 08:41:07 http: TLS handshake error from 10.129.0.1:43730: EOF 2018/07/25 08:41:17 http: TLS handshake error from 10.129.0.1:43738: EOF 2018/07/25 08:41:27 http: TLS handshake error from 10.129.0.1:43746: EOF 2018/07/25 08:41:37 http: TLS handshake error from 10.129.0.1:43754: EOF level=info timestamp=2018-07-25T08:41:39.266273Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:41:47 http: TLS handshake error from 10.129.0.1:43762: EOF 2018/07/25 08:41:57 http: TLS handshake error from 10.129.0.1:43770: EOF 2018/07/25 08:42:07 http: TLS handshake error from 10.129.0.1:43778: EOF 2018/07/25 08:42:17 http: TLS handshake error from 10.129.0.1:43786: EOF 2018/07/25 08:42:27 http: TLS handshake error from 10.129.0.1:43794: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running 2018/07/25 08:40:35 http: TLS handshake error from 10.129.0.1:43370: EOF 2018/07/25 08:40:45 http: TLS handshake error from 10.129.0.1:43378: EOF 2018/07/25 08:40:55 http: TLS handshake error from 10.129.0.1:43386: EOF 2018/07/25 08:41:05 http: TLS handshake error from 10.129.0.1:43396: EOF level=info timestamp=2018-07-25T08:41:09.330688Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:41:15 http: TLS handshake error from 10.129.0.1:43404: EOF 2018/07/25 08:41:25 http: TLS handshake error from 10.129.0.1:43412: EOF 2018/07/25 08:41:35 http: TLS handshake error from 10.129.0.1:43420: EOF 2018/07/25 08:41:45 http: TLS handshake error from 10.129.0.1:43428: EOF 2018/07/25 08:41:55 http: TLS handshake error from 10.129.0.1:43436: EOF 2018/07/25 08:42:05 http: TLS handshake error from 10.129.0.1:43444: EOF level=info 
timestamp=2018-07-25T08:42:09.466577Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:42:15 http: TLS handshake error from 10.129.0.1:43452: EOF 2018/07/25 08:42:25 http: TLS handshake error from 10.129.0.1:43460: EOF 2018/07/25 08:42:35 http: TLS handshake error from 10.129.0.1:43468: EOF Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:41:03.289429Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6279g kind= uid=64e1b38f-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:41:03.297069Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6279g kind= uid=64e1b38f-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:41:03.604907Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6279g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6279g" level=info timestamp=2018-07-25T08:41:34.077366Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4rtj kind= uid=773909ea-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:41:34.078070Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4rtj kind= uid=773909ea-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:41:34.423623Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj" level=info timestamp=2018-07-25T08:41:34.470184Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj" level=info timestamp=2018-07-25T08:41:34.751994Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmid4rtj, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 773909ea-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj" level=info timestamp=2018-07-25T08:42:04.925553Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminsgmd kind= uid=899a44bc-8fe6-11e8-bb2b-525500d15501 msg="Initializing 
VirtualMachineInstance" level=info timestamp=2018-07-25T08:42:04.929791Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminsgmd kind= uid=899a44bc-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:42:05.410761Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminsgmd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminsgmd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 899a44bc-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminsgmd" level=info timestamp=2018-07-25T08:42:35.692873Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic4ts2 kind= uid=9be5b628-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:42:35.695078Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic4ts2 kind= uid=9be5b628-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:42:35.938188Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2" level=info timestamp=2018-07-25T08:42:35.965886Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:40:02.777998Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.778126Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.797192Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.797346Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.797555Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.802503Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-25T08:40:02.802683Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.861889Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.862133Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.891552Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.891700Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.935696Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.936112Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.959368Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.959702Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:01.661562Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-25T08:40:01.664291Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown." 
level=info timestamp=2018-07-25T08:40:01.672691Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmik7jkl" level=info timestamp=2018-07-25T08:40:01.920465Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-25T08:40:01.920795Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-25T08:40:01.925608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:01.925743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-25T08:40:01.925779Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-25T08:40:02.109245Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.181179Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.181887Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.377832Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.380781Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
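
The "http: TLS handshake error from 10.129.0.1:...: EOF" lines that fill both virt-api logs recur every ten seconds with sequential source ports, which is consistent with a plain TCP health probe connecting and disconnecting without ever speaking TLS, rather than a real client failure. A minimal sketch of the client side of that pattern; the address and port are illustrative assumptions for a TLS endpoint such as virt-api:

package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// Address and port are assumed values for illustration; a kubelet
	// tcpSocket probe behaves exactly like this against a TLS listener.
	conn, err := net.DialTimeout("tcp", "10.129.0.1:8443", 2*time.Second)
	if err != nil {
		fmt.Println("connect failed:", err)
		return
	}
	// Closing right away, before sending a ClientHello, makes the server's
	// TLS accept loop read EOF mid-handshake and log
	// "http: TLS handshake error from ...: EOF".
	conn.Close()
	fmt.Println("TCP connection opened and closed without a TLS handshake")
}
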
Pod name: virt-launcher-testvmic4ts2-k9xsq Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.805 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be reachable via the propagated IP from a Pod [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 on the same node from Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.StatusError | 0xc420b95560>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146 ------------------------------ Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:40:57 http: TLS handshake error from 10.129.0.1:43720: EOF 2018/07/25 08:41:07 http: TLS handshake error from 10.129.0.1:43730: EOF 2018/07/25 08:41:17 http: TLS handshake error from 10.129.0.1:43738: EOF 2018/07/25 08:41:27 http: TLS handshake error from 10.129.0.1:43746: EOF 2018/07/25 08:41:37 http: TLS handshake error from 10.129.0.1:43754: EOF level=info timestamp=2018-07-25T08:41:39.266273Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:41:47 http: TLS handshake error from 10.129.0.1:43762: EOF 2018/07/25 08:41:57 http: TLS handshake error from 10.129.0.1:43770: EOF 2018/07/25 08:42:07 http: TLS handshake error from 10.129.0.1:43778: EOF 2018/07/25 08:42:17 http: TLS handshake error from 10.129.0.1:43786: EOF 2018/07/25 08:42:27 http: TLS handshake error from 10.129.0.1:43794: EOF 2018/07/25 08:42:37 http: TLS handshake error from 10.129.0.1:43802: EOF level=info timestamp=2018-07-25T08:42:39.520693Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:42:47 http: TLS handshake error from 10.129.0.1:43810: EOF 2018/07/25 08:42:57 http: TLS handshake error from 10.129.0.1:43818: EOF Pod name: virt-api-7d79764579-wfvcn Pod phase: Running 2018/07/25 08:41:05 http: TLS handshake error from 10.129.0.1:43396: EOF level=info timestamp=2018-07-25T08:41:09.330688Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:41:15 http: TLS handshake error from 10.129.0.1:43404: EOF 2018/07/25 08:41:25 http: TLS handshake error from 10.129.0.1:43412: EOF 2018/07/25 08:41:35 http: TLS handshake error from 10.129.0.1:43420: EOF 2018/07/25 08:41:45 http: TLS handshake error from 10.129.0.1:43428: EOF 2018/07/25 08:41:55 http: TLS handshake error from 10.129.0.1:43436: EOF 2018/07/25 08:42:05 http: TLS handshake error from 10.129.0.1:43444: EOF level=info timestamp=2018-07-25T08:42:09.466577Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 
08:42:15 http: TLS handshake error from 10.129.0.1:43452: EOF
2018/07/25 08:42:25 http: TLS handshake error from 10.129.0.1:43460: EOF
2018/07/25 08:42:35 http: TLS handshake error from 10.129.0.1:43468: EOF
2018/07/25 08:42:45 http: TLS handshake error from 10.129.0.1:43476: EOF
2018/07/25 08:42:55 http: TLS handshake error from 10.129.0.1:43484: EOF
2018/07/25 08:43:05 http: TLS handshake error from 10.129.0.1:43492: EOF

Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:41:34.470184Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj"
level=info timestamp=2018-07-25T08:41:34.751994Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4rtj\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmid4rtj, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 773909ea-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4rtj"
level=info timestamp=2018-07-25T08:42:04.925553Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminsgmd kind= uid=899a44bc-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:42:04.929791Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminsgmd kind= uid=899a44bc-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:42:05.410761Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminsgmd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminsgmd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 899a44bc-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminsgmd"
level=info timestamp=2018-07-25T08:42:35.692873Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic4ts2 kind= uid=9be5b628-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:42:35.695078Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic4ts2 kind= uid=9be5b628-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:42:35.938188Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:35.965886Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:36.013317Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:36.308766Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmic4ts2, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 9be5b628-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:43:06.497719Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilgjrw kind= uid=ae4d6121-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:43:06.498279Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilgjrw kind= uid=ae4d6121-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:43:06.951266Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:06.997657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"

Pod name: virt-handler-cqcks
Pod phase: Running
level=info timestamp=2018-07-25T08:40:02.777998Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.778126Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797192Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797346Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.797555Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.802503Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.802683Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.861889Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.862133Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.891552Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.891700Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.935696Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.936112Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.959368Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.959702Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-swxsf
Pod phase: Running
level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.661562Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.664291Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:01.672691Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmik7jkl"
level=info timestamp=2018-07-25T08:40:01.920465Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-25T08:40:01.920795Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-25T08:40:01.925608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.925743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.925779Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:02.109245Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.181179Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.181887Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.377832Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.380781Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
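Note on the reenqueue messages above: "Operation cannot be fulfilled ... the object has been modified" is an ordinary Kubernetes optimistic-concurrency conflict (HTTP 409). A write carried a stale resourceVersion, and the controller recovers by reenqueuing the key, exactly as logged. For code that updates a VMI directly, the usual client-go pattern is retry.RetryOnConflict; the sketch below only simulates the conflict locally (the three-attempt shape is illustrative, not taken from this run):

    package main

    import (
        "fmt"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        "k8s.io/apimachinery/pkg/runtime/schema"
        "k8s.io/client-go/util/retry"
    )

    func main() {
        gr := schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"}
        attempts := 0

        // RetryOnConflict re-runs the closure whenever it returns a 409 Conflict,
        // the same "object has been modified" error the controller logs above.
        err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
            attempts++
            if attempts < 3 {
                // Stand-in for a write with a stale resourceVersion; a real
                // caller would re-GET the VMI here before mutating it again.
                return apierrors.NewConflict(gr, "testvmid4rtj", fmt.Errorf("object was modified"))
            }
            return nil // the update went through against the latest version
        })
        fmt.Println("attempts:", attempts, "err:", err)
    }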
Pod name: virt-launcher-testvmilgjrw-pj5r6
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.954 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on a different node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc42083c240>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------

Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:41:37 http: TLS handshake error from 10.129.0.1:43754: EOF
level=info timestamp=2018-07-25T08:41:39.266273Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:41:47 http: TLS handshake error from 10.129.0.1:43762: EOF
2018/07/25 08:41:57 http: TLS handshake error from 10.129.0.1:43770: EOF
2018/07/25 08:42:07 http: TLS handshake error from 10.129.0.1:43778: EOF
2018/07/25 08:42:17 http: TLS handshake error from 10.129.0.1:43786: EOF
2018/07/25 08:42:27 http: TLS handshake error from 10.129.0.1:43794: EOF
2018/07/25 08:42:37 http: TLS handshake error from 10.129.0.1:43802: EOF
level=info timestamp=2018-07-25T08:42:39.520693Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:42:47 http: TLS handshake error from 10.129.0.1:43810: EOF
2018/07/25 08:42:57 http: TLS handshake error from 10.129.0.1:43818: EOF
2018/07/25 08:43:07 http: TLS handshake error from 10.129.0.1:43826: EOF
2018/07/25 08:43:17 http: TLS handshake error from 10.129.0.1:43834: EOF
2018/07/25 08:43:27 http: TLS handshake error from 10.129.0.1:43842: EOF
2018/07/25 08:43:37 http: TLS handshake error from 10.129.0.1:43850: EOF

Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:41:35 http: TLS handshake error from 10.129.0.1:43420: EOF
2018/07/25 08:41:45 http: TLS handshake error from 10.129.0.1:43428: EOF
2018/07/25 08:41:55 http: TLS handshake error from 10.129.0.1:43436: EOF
2018/07/25 08:42:05 http: TLS handshake error from 10.129.0.1:43444: EOF
level=info timestamp=2018-07-25T08:42:09.466577Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:42:15 http: TLS handshake error from 10.129.0.1:43452: EOF
2018/07/25 08:42:25 http: TLS handshake error from 10.129.0.1:43460: EOF
2018/07/25 08:42:35 http: TLS handshake error from 10.129.0.1:43468: EOF
2018/07/25 08:42:45 http: TLS handshake error from 10.129.0.1:43476: EOF
2018/07/25 08:42:55 http: TLS handshake error from 10.129.0.1:43484: EOF
2018/07/25 08:43:05 http: TLS handshake error from 10.129.0.1:43492: EOF
level=info timestamp=2018-07-25T08:43:09.149014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:43:15 http: TLS handshake error from 10.129.0.1:43500: EOF
2018/07/25 08:43:25 http: TLS handshake error from 10.129.0.1:43508: EOF
2018/07/25 08:43:35 http: TLS handshake error from 10.129.0.1:43516: EOF

Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:42:04.929791Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminsgmd kind= uid=899a44bc-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:42:05.410761Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminsgmd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminsgmd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 899a44bc-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminsgmd"
level=info timestamp=2018-07-25T08:42:35.692873Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic4ts2 kind= uid=9be5b628-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:42:35.695078Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic4ts2 kind= uid=9be5b628-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:42:35.938188Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:35.965886Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:36.013317Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:36.308766Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmic4ts2, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 9be5b628-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:43:06.497719Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilgjrw kind= uid=ae4d6121-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:43:06.498279Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilgjrw kind= uid=ae4d6121-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:43:06.951266Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:06.997657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:37.394305Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:43:37.402777Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:43:37.744444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz5d6w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz5d6w"

Pod name: virt-handler-cqcks
Pod phase: Running
(log output byte-identical to the virt-handler-cqcks section in the previous dump; duplicate omitted)

Pod name: virt-handler-swxsf
Pod phase: Running
(log output byte-identical to the virt-handler-swxsf section in the previous dump; duplicate omitted)
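Note on the recurring "http: TLS handshake error from 10.129.0.1:...: EOF" entries: they appear every ten seconds in both virt-api pods, which looks like a plain-TCP health probe opening the TLS port and closing it without handshaking. That reading is an inference from the cadence, not something the log states. A minimal stdlib-only reproduction of the server-side symptom (addresses are chosen by httptest, nothing here touches the cluster):

    package main

    import (
        "fmt"
        "net"
        "net/http"
        "net/http/httptest"
        "time"
    )

    func main() {
        // A TLS server standing in for virt-api (httptest supplies a self-signed cert).
        srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            fmt.Fprintln(w, "ok")
        }))
        defer srv.Close()

        // A plain-TCP "probe": connect to the TLS port and hang up without
        // handshaking. The server then logs "http: TLS handshake error ...: EOF".
        conn, err := net.Dial("tcp", srv.Listener.Addr().String())
        if err != nil {
            panic(err)
        }
        conn.Close()

        time.Sleep(100 * time.Millisecond) // give the server goroutine time to emit the log line
    }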
Pod name: virt-launcher-testvmiz5d6w-r7vqr
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.836 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on the same node from Node
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc420b94a20>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------

Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
2018/07/25 08:42:07 http: TLS handshake error from 10.129.0.1:43778: EOF
2018/07/25 08:42:17 http: TLS handshake error from 10.129.0.1:43786: EOF
2018/07/25 08:42:27 http: TLS handshake error from 10.129.0.1:43794: EOF
2018/07/25 08:42:37 http: TLS handshake error from 10.129.0.1:43802: EOF
level=info timestamp=2018-07-25T08:42:39.520693Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:42:47 http: TLS handshake error from 10.129.0.1:43810: EOF
2018/07/25 08:42:57 http: TLS handshake error from 10.129.0.1:43818: EOF
2018/07/25 08:43:07 http: TLS handshake error from 10.129.0.1:43826: EOF
2018/07/25 08:43:17 http: TLS handshake error from 10.129.0.1:43834: EOF
2018/07/25 08:43:27 http: TLS handshake error from 10.129.0.1:43842: EOF
2018/07/25 08:43:37 http: TLS handshake error from 10.129.0.1:43850: EOF
level=info timestamp=2018-07-25T08:43:39.420409Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:43:47 http: TLS handshake error from 10.129.0.1:43858: EOF
2018/07/25 08:43:57 http: TLS handshake error from 10.129.0.1:43866: EOF
2018/07/25 08:44:07 http: TLS handshake error from 10.129.0.1:43874: EOF

Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:42:05 http: TLS handshake error from 10.129.0.1:43444: EOF
level=info timestamp=2018-07-25T08:42:09.466577Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:42:15 http: TLS handshake error from 10.129.0.1:43452: EOF
2018/07/25 08:42:25 http: TLS handshake error from 10.129.0.1:43460: EOF
2018/07/25 08:42:35 http: TLS handshake error from 10.129.0.1:43468: EOF
2018/07/25 08:42:45 http: TLS handshake error from 10.129.0.1:43476: EOF
2018/07/25 08:42:55 http: TLS handshake error from 10.129.0.1:43484: EOF
2018/07/25 08:43:05 http: TLS handshake error from 10.129.0.1:43492: EOF
level=info timestamp=2018-07-25T08:43:09.149014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:43:15 http: TLS handshake error from 10.129.0.1:43500: EOF
2018/07/25 08:43:25 http: TLS handshake error from 10.129.0.1:43508: EOF
2018/07/25 08:43:35 http: TLS handshake error from 10.129.0.1:43516: EOF
2018/07/25 08:43:45 http: TLS handshake error from 10.129.0.1:43524: EOF
2018/07/25 08:43:55 http: TLS handshake error from 10.129.0.1:43532: EOF
2018/07/25 08:44:05 http: TLS handshake error from 10.129.0.1:43540: EOF

Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:42:35.965886Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:36.013317Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:42:36.308766Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic4ts2\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmic4ts2, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 9be5b628-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic4ts2"
level=info timestamp=2018-07-25T08:43:06.497719Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilgjrw kind= uid=ae4d6121-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:43:06.498279Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilgjrw kind= uid=ae4d6121-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:43:06.951266Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:06.997657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:37.394305Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:43:37.402777Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:43:37.744444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz5d6w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz5d6w"
level=info timestamp=2018-07-25T08:43:38.030735Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz5d6w\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz5d6w, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c0b6af80-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz5d6w"
level=info timestamp=2018-07-25T08:44:08.187905Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqqpwt kind= uid=d316ed77-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:44:08.190194Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqqpwt kind= uid=d316ed77-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:44:08.554142Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"
level=info timestamp=2018-07-25T08:44:08.616337Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"

Pod name: virt-handler-cqcks
Pod phase: Running
(log output byte-identical to the virt-handler-cqcks section in the first dump; duplicate omitted)

Pod name: virt-handler-swxsf
Pod phase: Running
(log output byte-identical to the virt-handler-swxsf section in the first dump; duplicate omitted)
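Note on the failure pattern: every spec above fails the same way, with the BeforeEach's API call returning a 504 StatusError, Reason=Timeout, "request did not complete within allowed duration". That points at the API server (or the aggregated virt-api endpoint it proxies), not at the VMIs under test. The sketch below shows how client code can recognize this class of error; the constructed StatusError mirrors the dumps above and no cluster is needed to run it:

    package main

    import (
        "fmt"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
        // Reconstruct the StatusError shown in the failure dumps above.
        err := &apierrors.StatusError{ErrStatus: metav1.Status{
            Status:  metav1.StatusFailure,
            Message: "Timeout: request did not complete within allowed duration",
            Reason:  metav1.StatusReasonTimeout,
            Code:    504,
        }}

        // IsTimeout distinguishes this server-side timeout from, say, a 409
        // Conflict, so a caller can choose to retry rather than fail outright.
        fmt.Println("server timeout:", apierrors.IsTimeout(err))
    }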
Pod name: virt-launcher-testvmiqqpwt-gvvvg
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.958 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on a different node from Node
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc420b954d0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------

Pod name: disks-images-provider-jt7zs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-jz9rx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-qxdsk
Pod phase: Running
level=info timestamp=2018-07-25T08:42:39.520693Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:42:47 http: TLS handshake error from 10.129.0.1:43810: EOF
2018/07/25 08:42:57 http: TLS handshake error from 10.129.0.1:43818: EOF
2018/07/25 08:43:07 http: TLS handshake error from 10.129.0.1:43826: EOF
2018/07/25 08:43:17 http: TLS handshake error from 10.129.0.1:43834: EOF
2018/07/25 08:43:27 http: TLS handshake error from 10.129.0.1:43842: EOF
2018/07/25 08:43:37 http: TLS handshake error from 10.129.0.1:43850: EOF
level=info timestamp=2018-07-25T08:43:39.420409Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:43:47 http: TLS handshake error from 10.129.0.1:43858: EOF
2018/07/25 08:43:57 http: TLS handshake error from 10.129.0.1:43866: EOF
2018/07/25 08:44:07 http: TLS handshake error from 10.129.0.1:43874: EOF
2018/07/25 08:44:17 http: TLS handshake error from 10.129.0.1:43882: EOF
2018/07/25 08:44:27 http: TLS handshake error from 10.129.0.1:43890: EOF
2018/07/25 08:44:37 http: TLS handshake error from 10.129.0.1:43900: EOF
level=info timestamp=2018-07-25T08:44:39.415368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-wfvcn
Pod phase: Running
2018/07/25 08:43:45 http: TLS handshake error from 10.129.0.1:43524: EOF
2018/07/25 08:43:55 http: TLS handshake error from 10.129.0.1:43532: EOF
2018/07/25 08:44:05 http: TLS handshake error from 10.129.0.1:43540: EOF
level=info timestamp=2018-07-25T08:44:09.345655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:44:15.305276Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/25 08:44:15 http: TLS handshake error from 10.129.0.1:43548: EOF
level=info timestamp=2018-07-25T08:44:25.234330Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/25 08:44:25 http: TLS handshake error from 10.129.0.1:43556: EOF
level=info timestamp=2018-07-25T08:44:31.832331Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:44:31.867498Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:44:31.897662Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:44:33.738271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:44:33.743603Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/25 08:44:35 http: TLS handshake error from 10.129.0.1:43566: EOF
level=info timestamp=2018-07-25T08:44:35.412103Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-controller-7d57d96b65-blgfz
Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-h72d2
Pod phase: Running
level=info timestamp=2018-07-25T08:43:06.951266Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:06.997657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:37.394305Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:43:37.402777Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:43:37.744444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz5d6w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz5d6w"
level=info timestamp=2018-07-25T08:43:38.030735Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz5d6w\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz5d6w, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c0b6af80-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz5d6w"
level=info timestamp=2018-07-25T08:44:08.187905Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqqpwt kind= uid=d316ed77-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:44:08.190194Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqqpwt kind= uid=d316ed77-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:44:08.554142Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"
level=info timestamp=2018-07-25T08:44:08.616337Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"
level=info timestamp=2018-07-25T08:44:08.982583Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqqpwt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d316ed77-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"
level=info timestamp=2018-07-25T08:44:39.215028Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitglqx kind= uid=e58b1933-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:44:39.223815Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitglqx kind= uid=e58b1933-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:44:39.481450Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx"
level=info timestamp=2018-07-25T08:44:39.517191Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx"

Pod name: virt-handler-cqcks
Pod phase: Running
(log output byte-identical to the virt-handler-cqcks section in the first dump; duplicate omitted)

Pod name: virt-handler-swxsf
Pod phase: Running
(log output byte-identical to the virt-handler-swxsf section in the first dump; duplicate omitted)
Pod name: virt-launcher-testvmitglqx-lqq5x Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.817 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a service matching the vmi exposed [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:283 should be able to reach the vmi based on labels specified on the vmi /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:303 Expected error: <*errors.StatusError | 0xc420152990>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146 ------------------------------ Pod name: disks-images-provider-jt7zs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jz9rx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-qxdsk Pod phase: Running 2018/07/25 08:43:27 http: TLS handshake error from 10.129.0.1:43842: EOF 2018/07/25 08:43:37 http: TLS handshake error from 10.129.0.1:43850: EOF level=info timestamp=2018-07-25T08:43:39.420409Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:43:47 http: TLS handshake error from 10.129.0.1:43858: EOF 2018/07/25 08:43:57 http: TLS handshake error from 10.129.0.1:43866: EOF 2018/07/25 08:44:07 http: TLS handshake error from 10.129.0.1:43874: EOF 2018/07/25 08:44:17 http: TLS handshake error from 10.129.0.1:43882: EOF 2018/07/25 08:44:27 http: TLS handshake error from 10.129.0.1:43890: EOF 2018/07/25 08:44:37 http: TLS handshake error from 10.129.0.1:43900: EOF level=info timestamp=2018-07-25T08:44:39.415368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:44:47 http: TLS handshake error from 10.129.0.1:43908: EOF 2018/07/25 08:44:57 http: TLS handshake error from 10.129.0.1:43916: EOF 2018/07/25 08:45:07 http: TLS handshake error from 10.129.0.1:43924: EOF level=info timestamp=2018-07-25T08:45:08.810078Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-25T08:45:09.510111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-wfvcn Pod phase: Running level=info timestamp=2018-07-25T08:44:31.897662Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-25T08:44:33.738271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-25T08:44:33.743603Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/25 08:44:35 http: TLS handshake error from 10.129.0.1:43566: EOF 
Pod name: disks-images-provider-jt7zs Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-jz9rx Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-qxdsk Pod phase: Running
2018/07/25 08:43:27 http: TLS handshake error from 10.129.0.1:43842: EOF
2018/07/25 08:43:37 http: TLS handshake error from 10.129.0.1:43850: EOF
level=info timestamp=2018-07-25T08:43:39.420409Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:43:47 http: TLS handshake error from 10.129.0.1:43858: EOF
2018/07/25 08:43:57 http: TLS handshake error from 10.129.0.1:43866: EOF
2018/07/25 08:44:07 http: TLS handshake error from 10.129.0.1:43874: EOF
2018/07/25 08:44:17 http: TLS handshake error from 10.129.0.1:43882: EOF
2018/07/25 08:44:27 http: TLS handshake error from 10.129.0.1:43890: EOF
2018/07/25 08:44:37 http: TLS handshake error from 10.129.0.1:43900: EOF
level=info timestamp=2018-07-25T08:44:39.415368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:44:47 http: TLS handshake error from 10.129.0.1:43908: EOF
2018/07/25 08:44:57 http: TLS handshake error from 10.129.0.1:43916: EOF
2018/07/25 08:45:07 http: TLS handshake error from 10.129.0.1:43924: EOF
level=info timestamp=2018-07-25T08:45:08.810078Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:45:09.510111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-wfvcn Pod phase: Running
level=info timestamp=2018-07-25T08:44:31.897662Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:44:33.738271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:44:33.743603Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/25 08:44:35 http: TLS handshake error from 10.129.0.1:43566: EOF
level=info timestamp=2018-07-25T08:44:35.412103Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:44:45.316197Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:44:45.367313Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/25 08:44:45 http: TLS handshake error from 10.129.0.1:43574: EOF
level=info timestamp=2018-07-25T08:44:45.509226Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:44:45.518587Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/25 08:44:55 http: TLS handshake error from 10.129.0.1:43582: EOF
level=info timestamp=2018-07-25T08:44:55.705685Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/25 08:45:05 http: TLS handshake error from 10.129.0.1:43590: EOF
level=info timestamp=2018-07-25T08:45:05.982191Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:45:08.891255Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running
level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running
level=info timestamp=2018-07-25T08:43:06.997657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw"
level=info timestamp=2018-07-25T08:43:37.394305Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:43:37.402777Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:43:37.744444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz5d6w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz5d6w"
level=info timestamp=2018-07-25T08:43:38.030735Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz5d6w\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiz5d6w, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c0b6af80-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz5d6w"
level=info timestamp=2018-07-25T08:44:08.187905Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqqpwt kind= uid=d316ed77-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:44:08.190194Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqqpwt kind= uid=d316ed77-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:44:08.554142Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"
level=info timestamp=2018-07-25T08:44:08.616337Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"
level=info timestamp=2018-07-25T08:44:08.982583Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqqpwt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqqpwt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d316ed77-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqqpwt"
level=info timestamp=2018-07-25T08:44:39.215028Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitglqx kind= uid=e58b1933-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-25T08:44:39.223815Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitglqx kind= uid=e58b1933-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-25T08:44:39.481450Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx"
level=info timestamp=2018-07-25T08:44:39.517191Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx"
level=info timestamp=2018-07-25T08:44:39.841668Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitglqx, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e58b1933-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx"

Pod name: virt-handler-cqcks Pod phase: Running
level=info timestamp=2018-07-25T08:40:02.777998Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.778126Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797192Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.797346Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.797555Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.802503Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.802683Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.861889Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.862133Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.891552Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.891700Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.935696Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.936112Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.959368Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.959702Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-swxsf Pod phase: Running
level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.661562Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.664291Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:01.672691Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmik7jkl"
level=info timestamp=2018-07-25T08:40:01.920465Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-25T08:40:01.920795Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-25T08:40:01.925608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:01.925743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp."
level=info timestamp=2018-07-25T08:40:01.925779Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown."
level=info timestamp=2018-07-25T08:40:02.109245Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.181179Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.181887Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-25T08:40:02.377832Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-25T08:40:02.380781Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
• Failure in Spec Setup (BeforeEach) [30.549 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  with a service matching the vmi exposed [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:283
    should fail to reach the vmi if an invalid servicename is used
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:314

    Expected error:
        <*errors.StatusError | 0xc420153b00>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------
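The virt-controller entries in these dumps repeatedly log "the object has been modified; please apply your changes to the latest version and try again": an optimistic-concurrency conflict that the controller resolves by reenqueuing the VirtualMachineInstance. A client updating the same object would typically wrap the update in client-go's conflict-retry helper. A minimal sketch, assuming hypothetical getLatestVMI/updateVMI helpers in place of the real KubeVirt client calls:

package main

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

// VMI is a stand-in for the real VirtualMachineInstance type.
type VMI struct{ Labels map[string]string }

func getLatestVMI() (*VMI, error) { return &VMI{Labels: map[string]string{}}, nil } // hypothetical
func updateVMI(v *VMI) error      { return nil }                                    // hypothetical

func main() {
	// RetryOnConflict re-runs the closure whenever the update returns a
	// Conflict error, so each attempt mutates the freshest ResourceVersion
	// instead of the stale copy that triggered the conflict.
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		vmi, err := getLatestVMI()
		if err != nil {
			return err
		}
		vmi.Labels["expose"] = "me"
		return updateVMI(vmi)
	})
	fmt.Println("update result:", err)
}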
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-25T08:45:08.891255Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/25 08:45:15 http: TLS handshake error from 10.129.0.1:43598: EOF level=info timestamp=2018-07-25T08:45:15.748214Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-25T08:45:15.759106Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-25T08:45:15.771433Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-25T08:45:16.110853Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/25 08:45:25 http: TLS handshake error from 10.129.0.1:43606: EOF level=info timestamp=2018-07-25T08:45:26.170085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-25T08:45:29.864820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-25T08:45:29.870864Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/25 08:45:35 http: TLS handshake error from 10.129.0.1:43614: EOF level=info timestamp=2018-07-25T08:45:36.347830Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-25T08:45:39.387158Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-blgfz Pod phase: Running level=info timestamp=2018-07-25T08:29:05.120187Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-h72d2 Pod phase: Running level=info timestamp=2018-07-25T08:43:06.997657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilgjrw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilgjrw" level=info timestamp=2018-07-25T08:43:37.394305Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz5d6w kind= uid=c0b6af80-8fe6-11e8-bb2b-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-25T08:43:37.402777Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default 
• Failure in Spec Setup (BeforeEach) [30.679 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  with a subdomain and a headless service given [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:330
    should be able to reach the vmi via its unique fully qualified domain name
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:353

    Expected error:
        <*errors.StatusError | 0xc42083d830>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------
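Each of these setup failures carries the same *errors.StatusError: Code 504, Reason "Timeout". When triaging such a run programmatically, apimachinery's error helpers distinguish a transient apiserver timeout from a genuine validation failure. A sketch that reconstructs the logged error only in order to classify it (the construction mimics the log; it is not how the error originally arose):

package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// Rebuild the StatusError shown in the failure output above.
	err := &apierrors.StatusError{ErrStatus: metav1.Status{
		Status:  metav1.StatusFailure,
		Message: "Timeout: request did not complete within allowed duration",
		Reason:  metav1.StatusReasonTimeout,
		Code:    504,
	}}
	// IsTimeout is true for StatusReasonTimeout, so the right reaction is
	// to retry or flag infrastructure flakiness, not to blame the VMI spec.
	fmt.Println("apiserver timeout?", apierrors.IsTimeout(err))
}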
Pod name: disks-images-provider-jt7zs Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-jz9rx Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-qxdsk Pod phase: Running
2018/07/25 08:44:27 http: TLS handshake error from 10.129.0.1:43890: EOF
2018/07/25 08:44:37 http: TLS handshake error from 10.129.0.1:43900: EOF
level=info timestamp=2018-07-25T08:44:39.415368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:44:47 http: TLS handshake error from 10.129.0.1:43908: EOF
2018/07/25 08:44:57 http: TLS handshake error from 10.129.0.1:43916: EOF
2018/07/25 08:45:07 http: TLS handshake error from 10.129.0.1:43924: EOF
level=info timestamp=2018-07-25T08:45:08.810078Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-25T08:45:09.510111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:45:17 http: TLS handshake error from 10.129.0.1:43932: EOF
2018/07/25 08:45:27 http: TLS handshake error from 10.129.0.1:43940: EOF
2018/07/25 08:45:37 http: TLS handshake error from 10.129.0.1:43948: EOF
2018/07/25 08:45:47 http: TLS handshake error from 10.129.0.1:43956: EOF
2018/07/25 08:45:57 http: TLS handshake error from 10.129.0.1:43964: EOF
2018/07/25 08:46:07 http: TLS handshake error from 10.129.0.1:43972: EOF
level=info timestamp=2018-07-25T08:46:09.162031Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-wfvcn Pod phase: Running
2018/07/25 08:45:35 http: TLS handshake error from 10.129.0.1:43614: EOF
level=info timestamp=2018-07-25T08:45:36.347830Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:45:39.387158Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/25 08:45:45 http: TLS handshake error from 10.129.0.1:43622: EOF
level=info timestamp=2018-07-25T08:45:45.957207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:45:45.990001Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:45:46.002387Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:45:46.504508Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/25 08:45:55 http: TLS handshake error from 10.129.0.1:43630: EOF
level=info timestamp=2018-07-25T08:45:56.736168Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/25 08:46:05 http: TLS handshake error from 10.129.0.1:43638: EOF
level=info timestamp=2018-07-25T08:46:06.957879Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:46:09.030465Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:46:09.047173Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-25T08:46:09.063088Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
VirtualMachineInstance" level=info timestamp=2018-07-25T08:44:39.223815Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitglqx kind= uid=e58b1933-8fe6-11e8-bb2b-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-25T08:44:39.481450Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx" level=info timestamp=2018-07-25T08:44:39.517191Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx" level=info timestamp=2018-07-25T08:44:39.841668Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitglqx\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitglqx, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e58b1933-8fe6-11e8-bb2b-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitglqx" Pod name: virt-handler-cqcks Pod phase: Running level=info timestamp=2018-07-25T08:40:02.777998Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.778126Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind= uid=bddb0767-8fe5-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.797192Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.797346Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.797555Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h55w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.802503Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.802683Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind= uid=072ddbeb-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.861889Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-25T08:40:02.862133Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6km79 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.891552Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.891700Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib4j5w kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.935696Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.936112Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqpg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:02.959368Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-25T08:40:02.959702Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixmcbj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-swxsf Pod phase: Running level=info timestamp=2018-07-25T08:39:16.680861Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-25T08:39:16.685793Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:01.661562Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-25T08:40:01.664291Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-25T08:40:01.672691Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmik7jkl" level=info timestamp=2018-07-25T08:40:01.920465Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-25T08:40:01.920795Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-25T08:40:01.925608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-25T08:40:01.925743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmik7jkl kind= uid=19c73b6a-8fe6-11e8-bb2b-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." 
• Failure in Spec Setup (BeforeEach) [30.718 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom interface model [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:379
    should expose the right device type to the guest
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:380

    Expected error:
        <*errors.StatusError | 0xc42083ccf0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146
------------------------------
• [SLOW TEST:185.068 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with default interface model
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:393
    should expose the right device type to the guest
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:394
------------------------------
• [SLOW TEST:32.504 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:413
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:414
------------------------------
• [SLOW TEST:36.610 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address in non-conventional format
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:425
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:426
------------------------------
• [SLOW TEST:34.726 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address and slirp interface
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:438
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:439
------------------------------
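The indented spec listings above mirror Ginkgo's nested Describe/Context/It tree: each indentation level is one nesting level, and each description line is followed by its source location. A sketch of the shape that produces a listing like "Networking / VirtualMachineInstance with custom MAC address / should configure custom MAC address" (suite bootstrap omitted; the body is a placeholder, not the actual KubeVirt source):

package networking_sketch_test

import (
	. "github.com/onsi/ginkgo"
)

var _ = Describe("Networking", func() {
	Context("VirtualMachineInstance with custom MAC address", func() {
		It("should configure custom MAC address", func() {
			// Placeholder: create the VMI with an explicit MAC on its
			// first interface, then assert the guest reports that MAC.
		})
	})
})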
• [SLOW TEST:47.780 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with disabled automatic attachment of interfaces
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:451
    should not configure any external interfaces
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:452
------------------------------
••
------------------------------
• [SLOW TEST:46.214 seconds]
Health Monitoring
/root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37
  A VirtualMachineInstance with a watchdog device
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56
    should be shut down when the watchdog expires
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57
------------------------------
volumedisk0
compute
• [SLOW TEST:40.409 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with 3 CPU cores
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56
      should report 3 cpu cores under guest OS
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62
------------------------------
• [SLOW TEST:19.755 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-2Mi
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
S [SKIPPING] [0.225 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-1Gi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        No node with hugepages hugepages-1Gi capacity

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:160
------------------------------
•
------------------------------
• [SLOW TEST:117.837 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model defined
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:284
      should report defined CPU model
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:285
------------------------------
• [SLOW TEST:119.566 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model equals to passthrough
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:312
      should report exactly the same model as node CPU
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:313
------------------------------
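Spec lines that point into vendor/github.com/onsi/ginkgo/extensions/table (table.go:92, table_entry.go:46), such as the hugepages rows above, come from Ginkgo's table extension: one shared test body, one generated spec per entry, which is why the skipped hugepages-1Gi row shares its "should consume hugepages" parent with the passing hugepages-2Mi row. A sketch of that shape (suite bootstrap omitted; the body and parameters are illustrative, not the actual KubeVirt test):

package configurations_sketch_test

import (
	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/extensions/table"
	. "github.com/onsi/gomega"
)

var _ = Describe("Configurations", func() {
	// DescribeTable expands into one spec per Entry, each reported under
	// the shared "should consume hugepages" description.
	table.DescribeTable("should consume hugepages", func(pageSize string) {
		// Placeholder: schedule a VMI requesting this hugepage size and
		// verify the guest actually consumes it.
		Expect(pageSize).ToNot(BeEmpty())
	},
		table.Entry("hugepages-2Mi", "2Mi"),
		table.Entry("hugepages-1Gi", "1Gi"), // skipped above: no node offers 1Gi capacity
	)
})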
• [SLOW TEST:117.967 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model not defined
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:336
      should report CPU model from libvirt capabilities
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:337
------------------------------
• [SLOW TEST:43.586 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  New VirtualMachineInstance with all supported drives
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:357
    should have all the device nodes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:380
------------------------------
• [SLOW TEST:39.011 seconds]
LeaderElection
/root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43
  Start a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53
    when the controller pod is not running
    /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54
      should success
      /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55
------------------------------
• [SLOW TEST:41.580 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
      with a cirros image
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
        should return that we are running cirros
        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67
------------------------------
• [SLOW TEST:43.184 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
      with a fedora image
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:76
        should return that we are running fedora
        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77
------------------------------
• [SLOW TEST:34.389 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
      should be able to reconnect to console multiple times
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:86
------------------------------
• [SLOW TEST:42.332 seconds]
CloudInit UserData
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    with cloudInitNoCloud userDataBase64 source
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81
      should have cloud-init data
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82
------------------------------
• [SLOW TEST:97.064 seconds]
CloudInit UserData
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    with cloudInitNoCloud userDataBase64 source
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81
      with injected ssh-key
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92
        should have ssh-key under authorized keys
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93
------------------------------
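The cloudInitNoCloud userDataBase64 specs above feed the VMI a base64-encoded cloud-init payload. A minimal sketch of preparing such a payload (the #cloud-config content and the surrounding wiring are illustrative; only the base64 step is shown):

package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// Illustrative cloud-init user data; a real test would inject
	// credentials or an ssh key here.
	userData := "#cloud-config\npassword: fedora\nchpasswd: { expire: False }"
	encoded := base64.StdEncoding.EncodeToString([]byte(userData))
	// This string is what goes into the VMI volume's
	// cloudInitNoCloud.userDataBase64 field.
	fmt.Println(encoded)
}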
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92
        should have ssh-key under authorized keys
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93
------------------------------
• [SLOW TEST:53.600 seconds]
CloudInit UserData
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    with cloudInitNoCloud userData source
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118
      should process provided cloud-init data
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119
------------------------------
• [SLOW TEST:42.660 seconds]
CloudInit UserData
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    should take user-data from k8s secret
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162
------------------------------
• [SLOW TEST:21.900 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:20.913 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given an vm
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:20.647 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi preset
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:20.557 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi replica set
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:18.917 seconds]
VNC
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54
    with VNC connection
    /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62
      should allow accessing the VNC device
      /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64
------------------------------
•••
------------------------------
• [SLOW TEST:5.530 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48
    Without permissions
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:56
      should not be able to access subresource endpoint
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:57
------------------------------
• [SLOW TEST:5.525 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization For Version Command
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:63
    with authenticated user
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:66
      should be allowed to access subresource version endpoint
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:67
------------------------------
• [SLOW TEST:5.466 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization For Version Command
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:63
    Without permissions
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:71
      should be able to access subresource version endpoint
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:72
------------------------------
•••••••••••
------------------------------
• [SLOW TEST:6.055 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should scale
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    to three, to two and then to zero replicas
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:8.893 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should scale
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    to five, to six and then to zero replicas
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
••
------------------------------
• [SLOW TEST:18.687 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should update readyReplicas once VMIs are up
  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157
------------------------------
• [SLOW TEST:13.763 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should remove VMIs once it is marked for deletion
  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:169
------------------------------
•
------------------------------
• [SLOW TEST:5.746 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should not scale when paused and scale when resume
  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223
------------------------------
• [SLOW TEST:5.743 seconds]
VirtualMachineInstanceReplicaSet
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46
  should remove the finished VM
  /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279
------------------------------
•
------------------------------
• [SLOW TEST:18.038 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    should start it
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:76
------------------------------
• [SLOW TEST:18.855 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    should attach virt-launcher to it
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:82
------------------------------
••••
------------------------------
• [SLOW TEST:33.190 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    with boot order
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:170
      should be able to boot from selected disk
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        Alpine as first boot
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:26.105 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    with boot order
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:170
      should be able to boot from selected disk
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        Cirros as first boot
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:17.051 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    with user-data
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:201
      without k8s secret
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:202
        should retry starting the VirtualMachineInstance
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:203
------------------------------
• [SLOW TEST:18.938 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    with user-data
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:201
      without k8s secret
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:202
        should log warning and proceed once the secret is there
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:233
------------------------------
• [SLOW TEST:38.445 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    when virt-launcher crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:281
      should be stopped and have Failed phase
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:282
------------------------------
• [SLOW TEST:29.460 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    when virt-handler crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:304
      should recover and continue management
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:305
------------------------------
• [SLOW TEST:6.360 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    when virt-handler is responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:335
      should indicate that a node is ready for vmis
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:336
------------------------------
• [SLOW TEST:94.693 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    when virt-handler is not responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:366
      the node controller should react
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:405
------------------------------
• [SLOW TEST:19.803 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    with node tainted
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:458
      the vmi with tolerations should be scheduled
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:480
------------------------------
•
------------------------------
S [SKIPPING] [0.809 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:530
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-default [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Skip log query tests for JENKINS ci test environment
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535
------------------------------
S [SKIPPING] [0.301 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:530
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-alternative [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Skip log query tests for JENKINS ci test environment
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [1.032 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:591
      should enable emulation in virt-launcher [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:603

      Software emulation is not enabled on this cluster
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:599
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.838 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:591
      should be reflected in domain XML [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:640

      Software emulation is not enabled on this cluster
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:599
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.343 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:591
      should request a TUN device but not KVM [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:684

      Software emulation is not enabled on this cluster
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:599
------------------------------
••••
------------------------------
• [SLOW TEST:21.868 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Delete a VirtualMachineInstance's Pod
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:836
    should result in the VirtualMachineInstance moving to a finalized state
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837
------------------------------
• [SLOW TEST:42.916 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:868
    with an active pod.
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
      should result in pod being terminated
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:870
------------------------------
• [SLOW TEST:24.505 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:868
    with grace period greater than 0
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:893
      should run graceful shutdown
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:894
------------------------------
• [SLOW TEST:33.241 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:945
    should be in Failed phase
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:946
------------------------------
• [SLOW TEST:27.209 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:945
    should be left alone by virt-handler
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:973
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.011 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to start a vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1350
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.010 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to stop a running vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1350
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.015 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have correct UUID
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192

    Skip Windows tests that requires PVC disk-windows
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1350
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.010 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have pod IP
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208

    Skip Windows tests that requires PVC disk-windows
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1350
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.014 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to start a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242

    Skip Windows tests that requires PVC disk-windows
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1350
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.012 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to stop a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250

    Skip Windows tests that requires PVC disk-windows
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1350
------------------------------
• [SLOW TEST:93.959 seconds]
Slirp
/root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39
  should be able to
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    VirtualMachineInstance with slirp interface
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
•••
------------------------------
• [SLOW TEST:19.552 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should update VirtualMachine once VMIs are up
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195
------------------------------
••
------------------------------
• [SLOW TEST:28.193 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should recreate VirtualMachineInstance if it gets deleted
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245
------------------------------
• [SLOW TEST:55.496 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265
------------------------------
• [SLOW TEST:26.979 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should stop VirtualMachineInstance if running set to false
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325
------------------------------
• [SLOW TEST:155.424 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should start and stop VirtualMachineInstance multiple times
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333
------------------------------
• [SLOW TEST:49.267 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should not update the VirtualMachineInstance spec if Running
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346
------------------------------
• [SLOW TEST:173.063 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should survive guest shutdown, multiple times
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387
------------------------------
VM testvmillzct was scheduled to start
• [SLOW TEST:20.819 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    Using virtctl interface
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435
      should start a VirtualMachineInstance once
      /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436
------------------------------
VM testvmisshn5 was scheduled to stop
• [SLOW TEST:26.877 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    Using virtctl interface
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435
      should stop a VirtualMachineInstance once
      /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467
------------------------------
•••
------------------------------
• [SLOW TEST:5.950 seconds]
Templates
/root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42
  Launching VMI from VM Template
  /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60
    with given Fedora Template
    /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193
      with given VM JSON from the Template
      /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152
        with given VM from the VM JSON
        /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158
          with given VMI from the VM
          /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163
            should succeed to terminate the VMI using oc-patch command
            /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166
------------------------------
Waiting for namespace kubevirt-test-default to be removed, this can take a while ...
Waiting for namespace kubevirt-test-alternative to be removed, this can take a while ...
Summarizing 27 Failures:

[Fail] Storage Starting a VirtualMachineInstance With ephemeral alpine PVC [It] should not persist data
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:226

[Fail] Storage Starting a VirtualMachineInstance With VirtualMachineInstance with two PVCs [It] should start vmi multiple times
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64

[Fail] HookSidecars VMI definition with SM BIOS hook sidecar [It] should successfully start with hook sidecar annotation
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:63

[Fail] HookSidecars VMI definition with SM BIOS hook sidecar [It] should call Collect and OnDefineDomain on the hook sidecar
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:70

[Fail] HookSidecars VMI definition with SM BIOS hook sidecar [It] should update domain XML with SM BIOS properties
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1029

[Fail] RegistryDisk Starting and stopping the same VirtualMachineInstance with ephemeral registry disk [It] should success multiple times
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:98

[Fail] RegistryDisk Starting a VirtualMachineInstance with ephemeral registry disk [It] should not modify the spec on status update
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:119

[Fail] RegistryDisk Starting multiple VMIs with ephemeral registry disk [It] should success
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:55

[Fail] Expose Expose service on a VM [BeforeEach] Expose ClusterIP service Should expose a Cluster IP service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose service on a VM [BeforeEach] Expose ClusterIP service with string target-port Should expose a ClusterIP service and connect to the vm on port 80
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose service on a VM [BeforeEach] Expose NodePort service Should expose a NodePort service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose UDP service on a VMI [BeforeEach] Expose ClusterIP UDP service Should expose a ClusterIP service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose UDP service on a VMI [BeforeEach] Expose NodePort UDP service Should expose a NodePort service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose service on a VMI replica set [BeforeEach] Expose ClusterIP service Should create a ClusterIP service on VMRS and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:264

[Fail] Expose Expose service on an VM [BeforeEach] Expose ClusterIP service Connect to ClusterIP services that was set when VM was offline
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:331

[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance with pod network connectivity explicitly set
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance with custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] should be able to reach the internet
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] should be reachable via the propagated IP from a Pod on the same node from Pod
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] should be reachable via the propagated IP from a Pod on a different node from Pod
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] should be reachable via the propagated IP from a Pod on the same node from Node
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] should be reachable via the propagated IP from a Pod on a different node from Node
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] with a service matching the vmi exposed should be able to reach the vmi based on labels specified on the vmi
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] with a service matching the vmi exposed should fail to reach the vmi if an invalid servicename is used
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] with a subdomain and a headless service given should be able to reach the vmi via its unique fully qualified domain name
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

[Fail] Networking [BeforeEach] VirtualMachineInstance with custom interface model should expose the right device type to the guest
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:146

Ran 133 of 145 Specs in 4147.168 seconds
FAIL! -- 106 Passed | 27 Failed | 0 Pending | 12 Skipped
--- FAIL: TestTests (4147.18s)
FAIL
make: *** [functest] Error 1
+ make cluster-down
./cluster/down.sh