+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/07/30 08:55:21 Waiting for host: 192.168.66.102:22
2018/07/30 08:55:24 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:55:32 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:55:40 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:55:48 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:55:56 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:56:04 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2018/07/30 08:56:09 Waiting for host: 192.168.66.101:22
2018/07/30 08:56:12 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:56:20 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:56:28 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 08:56:33 Connected to tcp://192.168.66.101:22
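Note: the "Waiting for host ... Sleeping 5s ... Connected" messages above come from the cluster provider polling the nodes' SSH ports until they answer. A minimal bash sketch of the same retry-until-connect pattern, for illustration only (wait_for_port and its messages are hypothetical, not the provider's actual code):

    # Illustration only: poll a TCP port until it accepts connections,
    # sleeping 5s between attempts, like the messages above.
    wait_for_port() {
        local host=$1 port=$2
        echo "Waiting for host: $host:$port"
        until timeout 1 bash -c "exec 3<>/dev/tcp/$host/$port" 2>/dev/null; do
            echo "Problem with dial: $host:$port unreachable. Sleeping 5s"
            sleep 5
        done
        echo "Connected to tcp://$host:$port"
    }

    wait_for_port 192.168.66.102 22   # node02
    wait_for_port 192.168.66.101 22   # node01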
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=0.753 ms
--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 0.753/0.753/0.753/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable
--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms
+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml
PLAY [Populate config host groups] *********************************************
TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap]
****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) PLAY [Ensure there are new_nodes] ********************************************** TASK [fail] ******************************************************************** skipping: [localhost] TASK [fail] ******************************************************************** skipping: [localhost] PLAY [Initialization Checkpoint Start] ***************************************** TASK [Set install initialization 'In Progress'] ******************************** ok: [node01] PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] ****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** 
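Note: the shell trace at the top of this log (before the ansible-playbook output) amounts to a small node-discovery loop that probes candidate IPs and appends each reachable node to the Ansible inventory before running the scale-up playbook. A reconstruction of that loop follows; the redirect targets (>> /root/inventory, >> /etc/hosts) are assumptions, since `set -x` does not show redirections:

    # Reconstruction of the traced node-discovery loop, for readability.
    # Assumed: the echo lines append to /etc/hosts and /root/inventory.
    inventory_file=/root/inventory
    openshift_ansible=/root/openshift-ansible

    echo '[new_nodes]' >> "$inventory_file"
    sed -i '/\[OSEv3:children\]/a new_nodes' "$inventory_file"

    nodes_found=false
    for i in $(seq 2 100); do
        node=$(printf node%02d "$i")
        node_ip=$(printf 192.168.66.1%02d "$i")
        set +e
        ping "$node_ip" -c 1            # reachable => a VM exists for this slot
        if [ $? -ne 0 ]; then
            set -e
            break                       # first unreachable address ends the scan
        fi
        nodes_found=true
        set -e
        echo "$node_ip $node" >> /etc/hosts
        echo "Found $node. Adding it to the inventory."
        echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> "$inventory_file"
    done

    # Run the scale-up playbook only if at least one new node was found.
    if [ "$nodes_found" = true ]; then
        ansible-playbook -i "$inventory_file" "$openshift_ansible/playbooks/openshift-node/scaleup.yml"
    fi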
TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config PLAY [Ensure that all non-node hosts are accessible] *************************** TASK [Gathering Facts] ********************************************************* ok: [node01] PLAY [Initialize basic host facts] ********************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02 TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] *** ok: [node02] ok: [node01] TASK [openshift_sanitize_inventory : debug] ************************************ skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_stats] ******************************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] *** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02 included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02 TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Standardize on latest variable names] ***** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Normalize openshift_release] ************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02 TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* skipping: [node01] => (item=openshift_hosted_etcd_storage_kind) skipping: [node02] => (item=openshift_hosted_etcd_storage_kind) TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK 
[openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : At least one master is schedulable] ******* skipping: [node02] skipping: [node01] TASK [Detecting Operating System from ostree_booted] *************************** ok: [node02] ok: [node01] TASK [set openshift_deployment_type if unset] ********************************** skipping: [node01] skipping: [node02] TASK [check for node already bootstrapped] ************************************* ok: [node02] ok: [node01] TASK [initialize_facts set fact openshift_is_bootstrapped] ********************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] *** ok: [node01] ok: [node02] TASK [Determine Atomic Host Docker Version] ************************************ skipping: [node01] skipping: [node02] TASK [assert atomic host docker version is 1.12 or later] ********************** skipping: [node01] skipping: [node02] PLAY [Retrieve existing master configs and validate] *************************** TASK [openshift_control_plane : stat] ****************************************** ok: [node01] TASK [openshift_control_plane : slurp] ***************************************** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] *** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Initialize special first-master variables] ******************************* TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Disable web console if required] ***************************************** TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Setup yum repositories for all hosts] ************************************ TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Install Red Hat Subscription manager] ******************* skipping: [node02] TASK [rhel_subscribe : Is host already registered?] 
**************************** skipping: [node02] TASK [rhel_subscribe : Register host] ****************************************** skipping: [node02] TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] *********** skipping: [node02] TASK [rhel_subscribe : Attach to OpenShift Pool] ******************************* skipping: [node02] TASK [rhel_subscribe : Satellite preparation] ********************************** skipping: [node02] TASK [openshift_repos : openshift_repos detect ostree] ************************* ok: [node02] TASK [openshift_repos : Ensure libselinux-python is installed] ***************** ok: [node02] TASK [openshift_repos : Remove openshift_additional.repo file] ***************** ok: [node02] TASK [openshift_repos : Create any additional repos that are defined] ********** TASK [openshift_repos : include_tasks] ***************************************** skipping: [node02] TASK [openshift_repos : include_tasks] ***************************************** included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02 TASK [openshift_repos : Configure origin gpg keys] ***************************** ok: [node02] TASK [openshift_repos : Configure correct origin release repository] *********** ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2) TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] *** changed: [node02] => { "msg": "First run of openshift_repos" } TASK [openshift_repos : Record that openshift_repos already ran] *************** ok: [node02] RUNNING HANDLER [openshift_repos : refresh cache] ****************************** changed: [node02] PLAY [Install packages necessary for installer] ******************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Determine if chrony is installed] **************************************** [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message. 
changed: [node02] TASK [Install ntp package] ***************************************************** skipping: [node02] TASK [Start and enable ntpd/chronyd] ******************************************* changed: [node02] TASK [Ensure openshift-ansible installer package deps are installed] *********** ok: [node02] => (item=iproute) ok: [node02] => (item=dbus-python) ok: [node02] => (item=PyYAML) ok: [node02] => (item=python-ipaddress) ok: [node02] => (item=libsemanage-python) ok: [node02] => (item=yum-utils) ok: [node02] => (item=python-docker) PLAY [Initialize cluster facts] ************************************************ TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [get openshift_current_version] ******************************************* ok: [node02] ok: [node01] TASK [set_fact openshift_portal_net if present on masters] ********************* ok: [node01] ok: [node02] TASK [Gather Cluster facts] **************************************************** changed: [node02] changed: [node01] TASK [Set fact of no_proxy_internal_hostnames] ********************************* skipping: [node01] skipping: [node02] TASK [Initialize openshift.node.sdn_mtu] *************************************** changed: [node02] ok: [node01] PLAY [Initialize etcd host variables] ****************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role : openshift_version] **************************************** TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** ok: [node01] TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** ok: [node01] => { "msg": "openshift_pkg_version was not defined. 
Falling back to -3.10.0" } TASK [openshift_version : set_fact] ******************************************** ok: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Verify Requirements] ***************************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [Run variable sanity checks] ********************************************** ok: [node01] TASK [Validate openshift_node_groups and openshift_node_group_name] ************ ok: [node01] PLAY [Initialization Checkpoint End] ******************************************* TASK [Set install initialization 'Complete'] *********************************** ok: [node01] PLAY [Validate node hostnames] ************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Query DNS for IP address of node02] ************************************** ok: [node02] TASK [Validate openshift_hostname when defined] ******************************** skipping: [node02] TASK [Validate openshift_ip exists on node when defined] *********************** skipping: [node02] PLAY [Configure os_firewall] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [os_firewall : Detecting Atomic Host Operating System] ******************** ok: [node02] TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** ok: [node02] TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** skipping: [node02] TASK [os_firewall : Install firewalld packages] ******************************** skipping: [node02] TASK [os_firewall : Ensure iptables services are not enabled] ****************** skipping: [node02] => (item=iptables) skipping: [node02] => (item=ip6tables) TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** skipping: [node02] TASK [os_firewall : Start and enable firewalld service] ************************ skipping: [node02] TASK 
[os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** skipping: [node02] TASK [os_firewall : Restart polkitd] ******************************************* skipping: [node02] TASK [os_firewall : Wait for polkit action to have been created] *************** skipping: [node02] TASK [os_firewall : Ensure firewalld service is not enabled] ******************* ok: [node02] TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** skipping: [node02] TASK [os_firewall : Install iptables packages] ********************************* ok: [node02] => (item=iptables) ok: [node02] => (item=iptables-services) TASK [os_firewall : Start and enable iptables service] ************************* ok: [node02 -> node02] => (item=node02) TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** skipping: [node02] PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [container_runtime : Setup the docker-storage for overlay] **************** skipping: [node02] TASK [container_runtime : Create file system on extra volume device] *********** TASK [container_runtime : Create mount entry for extra volume] ***************** PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** ok: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* ok: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ****************** skipping: [node02] TASK [container_runtime : Add https_proxy to /etc/atomic.conf] ***************** skipping: [node02] TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ******************** skipping: [node02] TASK [container_runtime : Get current installed Docker version] **************** ok: [node02] TASK [container_runtime : Error out if Docker pre-installed but too old] ******* skipping: [node02] TASK [container_runtime : Error out if requested Docker is too old] ************ skipping: [node02] TASK [container_runtime : Install Docker] ************************************** skipping: [node02] TASK [container_runtime : Ensure 
docker.service.d directory exists] ************ ok: [node02] TASK [container_runtime : Configure Docker service unit file] ****************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Set registry params] ********************************* skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** skipping: [node02] TASK [container_runtime : Set Proxy Settings] ********************************** skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) TASK [container_runtime : Set various Docker options] ************************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Configure Docker Network OPTIONS] ******************** ok: [node02] TASK [container_runtime : Detect if docker is already started] ***************** ok: [node02] TASK [container_runtime : Start the Docker service] **************************** ok: [node02] TASK [container_runtime : set_fact] ******************************************** ok: [node02] TASK [container_runtime : Check for docker_storage_path/overlay2] ************** ok: [node02] TASK [container_runtime : Fixup SELinux permissions for docker] **************** changed: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* ok: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** ok: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** ok: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] TASK [container_runtime : Fail if Atomic Host since this is an rpm request] **** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create 
systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Check that overlay is in the kernel] ***************** skipping: [node02] TASK [container_runtime : Add overlay to modprobe.d] *************************** skipping: [node02] TASK [container_runtime : Manually modprobe overlay into the kernel] *********** skipping: [node02] TASK [container_runtime : Enable and start systemd-modules-load] *************** skipping: [node02] TASK [container_runtime : Install cri-o] *************************************** skipping: [node02] TASK [container_runtime : Remove CRI-O default configuration files] ************ skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf) skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf) TASK [container_runtime : Create the CRI-O configuration] ********************** skipping: [node02] TASK [container_runtime : Ensure CNI configuration directory exists] *********** skipping: [node02] TASK [container_runtime : Add iptables allow rules] **************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove iptables rules] ******************************* TASK [container_runtime : Add firewalld allow rules] *************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove firewalld allow rules] ************************ TASK [container_runtime : Configure the CNI network] *************************** skipping: [node02] TASK [container_runtime : Create /etc/sysconfig/crio-network] ****************** skipping: [node02] TASK [container_runtime : Start the CRI-O service] ***************************** skipping: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* skipping: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** skipping: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** skipping: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role : openshift_version] **************************************** TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** skipping: [node01] 
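Note: the version facts this play re-derives were already shown above (openshift_release "3.10", openshift_image_tag "v3.10.0-rc.0", openshift_pkg_version "-3.10.0*"); later in the log they become the versioned node packages installed on node02 (origin-node-3.10.0*, origin-clients-3.10.0*). A rough yum-level equivalent, for illustration only (the playbook drives the yum module with these specs rather than running this literal command):

    # Hedged illustration of how the "-3.10.0*" pkg_version fact maps onto
    # the package specs installed further down in this log.
    openshift_pkg_version='-3.10.0*'
    yum install -y "origin-node${openshift_pkg_version}" \
                   "origin-clients${openshift_pkg_version}" \
                   conntrack-tools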
TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Node Preparation Checkpoint Start] *************************************** TASK [Set Node preparation 'In Progress'] ************************************** ok: [node01] PLAY [Only target nodes that have not yet been bootstrapped] ******************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [add_host] **************************************************************** skipping: [localhost] => (item=node02) ok: [localhost] => (item=node01) PLAY [Disable excluders] ******************************************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02 TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-docker-excluder version detected] 
************ skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-excluder version detected] ******************* skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** skipping: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] PLAY [Configure nodes] ********************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_cloud_provider : Set cloud provider facts] ********************* skipping: [node02] TASK [openshift_cloud_provider : Create cloudprovider config dir] ************** skipping: [node02] TASK [openshift_cloud_provider : include the defined cloud provider files] ***** skipping: [node02] TASK [openshift_node : fail] *************************************************** skipping: [node02] TASK [openshift_node : Check for NetworkManager service] *********************** ok: [node02] TASK [openshift_node : Set fact using_network_manager] ************************* ok: [node02] TASK [openshift_node : Install dnsmasq] **************************************** ok: [node02] TASK [openshift_node : ensure origin/node directory exists] ******************** changed: [node02] => (item=/etc/origin) 
changed: [node02] => (item=/etc/origin/node) TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] *** skipping: [node02] TASK [openshift_node : Install network manager dispatch script] **************** skipping: [node02] TASK [openshift_node : Install dnsmasq configuration] ************************** ok: [node02] TASK [openshift_node : Deploy additional dnsmasq.conf] ************************* skipping: [node02] TASK [openshift_node : Enable dnsmasq] ***************************************** ok: [node02] TASK [openshift_node : Install network manager dispatch script] **************** ok: [node02] TASK [openshift_node : Add iptables allow rules] ******************************* ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove iptables rules] ********************************** TASK [openshift_node : Add firewalld allow rules] ****************************** skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove firewalld allow rules] *************************** TASK [openshift_node : Checking for journald.conf] ***************************** ok: [node02] TASK [openshift_node : Create journald persistence directories] **************** ok: [node02] TASK [openshift_node : Update journald setup] ********************************** ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'}) ok: [node02] => (item={u'var': u'Compress', u'val': True}) ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000}) ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'}) ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'}) ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'}) ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'}) ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'}) ok: [node02] => (item={u'var': u'ForwardToSyslog', 
u'val': False}) ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False}) TASK [openshift_node : Restart journald] *************************************** skipping: [node02] TASK [openshift_node : Disable swap] ******************************************* ok: [node02] TASK [openshift_node : Install node, clients, and conntrack packages] ********** ok: [node02] => (item={u'name': u'origin-node-3.10.0*'}) ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'}) ok: [node02] => (item={u'name': u'conntrack-tools'}) TASK [openshift_node : Restart cri-o] ****************************************** skipping: [node02] TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] *** changed: [node02] TASK [openshift_node : sysctl] ************************************************* ok: [node02] TASK [openshift_node : Check for credentials file for registry auth] *********** skipping: [node02] TASK [openshift_node : Create credentials for registry auth] ******************* skipping: [node02] TASK [openshift_node : Create credentials for registry auth (alternative)] ***** skipping: [node02] TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] *** skipping: [node02] TASK [openshift_node : Check that node image is present] *********************** changed: [node02] TASK [openshift_node : Pre-pull node image] ************************************ skipping: [node02] TASK [openshift_node : Copy node script to the node] *************************** ok: [node02] TASK [openshift_node : Install Node service file] ****************************** ok: [node02] TASK [openshift_node : Ensure old system path is set] ************************** skipping: [node02] => (item=/etc/origin/openvswitch) skipping: [node02] => (item=/var/lib/kubelet) skipping: [node02] => (item=/opt/cni/bin) TASK [openshift_node : Check status of node image pre-pull] ******************** skipping: [node02] TASK [openshift_node : Copy node container image to ostree storage] ************ skipping: [node02] TASK [openshift_node : Install or Update node system container] **************** skipping: [node02] TASK [openshift_node : Restart network manager to ensure networking configuration is in place] *** skipping: [node02] TASK [openshift_node : Configure Node settings] ******************************** ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='}) ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'}) ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'}) TASK [openshift_node : Configure Proxy Settings] ******************************* skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='}) skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='}) skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'}) TASK [openshift_node : file] *************************************************** skipping: [node02] TASK [openshift_node : Create the Node config] ********************************* changed: [node02] TASK [openshift_node : Configure Node Environment Variables] ******************* TASK [openshift_node : Ensure the node static pod directory exists] ************ changed: [node02] TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : Check status of node image pre-pull] 
******************** skipping: [node02] TASK [openshift_node : Install NFS storage plugin dependencies] **************** ok: [node02] TASK [openshift_node : Check for existence of nfs sebooleans] ****************** ok: [node02] => (item=virt_use_nfs) ok: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:21.053409', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.029054', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 09:04:21.024355', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:22.169237', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.018084', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 09:04:22.151153', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:21.053409', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.029054', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 09:04:21.024355', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:22.169237', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.018084', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 
09:04:22.151153', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install GlusterFS storage plugin dependencies] ********** ok: [node02] TASK [openshift_node : Check for existence of fusefs sebooleans] *************** ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:28.004760', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017061', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 09:04:27.987699', '_ansible_ignore_errors': None, 'failed': False}) ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:29.198063', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.020199', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 09:04:29.177864', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:28.004760', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.017061', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 09:04:27.987699', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 09:04:29.198063', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.020199', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, 
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 09:04:29.177864', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use the 
recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node02] changed: [node01] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the 
bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node01 -> node01] ok: [node02 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node01 -> node01] ok: [node02 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:04:00) Node Preparation : Complete (0:04:09) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5649 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 22d v1.10.0+b81c8f8 node02 Ready compute 51s v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ grep NotReady ++ cluster/kubectl.sh get nodes --no-headers + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 22d v1.10.0+b81c8f8 node02 Ready compute 52s v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
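Before the image build that starts here, the script gated on node readiness with a plain kubectl poll: list the nodes without headers, grep for NotReady, and proceed to make cluster-sync only once nothing matches. A minimal stand-alone sketch of that gate, with the retry loop added purely for illustration (the log above runs the check once):

  # Hypothetical re-creation of the readiness gate; not the project's actual script.
  for attempt in $(seq 1 60); do
      if ! kubectl get nodes --no-headers | grep NotReady > /dev/null; then
          echo 'Nodes are ready:'
          kubectl get nodes
          break
      fi
      sleep 5
  done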
sha256:ceba12cbc33e4e37a707840478a630db561e2427b78c8c9f9cd6d0b73276ab32 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:ceba12cbc33e4e37a707840478a630db561e2427b78c8c9f9cd6d0b73276ab32 go version go1.10 linux/amd64 Waiting for rsyncd to be ready go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.39 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b4f3251c6468 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> 813752072d9d Step 5/8 : USER 1001 ---> Using cache ---> 88b3556f36b4 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> 80c182391440 Removing intermediate container 26f6c08d29b9 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in 45639f3d9275 ---> 5f9115759e23 Removing intermediate container 45639f3d9275 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-controller" '' ---> Running in 938077d20e02 ---> 4ad5a5e508ab Removing intermediate container 938077d20e02 Successfully built 4ad5a5e508ab Sending build context to Docker daemon 43.31 MB Step 1/9 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 09010a005182 Step 3/9 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> fc9481693838 Step 4/9 : COPY virt-launcher /usr/bin/virt-launcher ---> 9db3559e62c0 Removing intermediate container 895929e86471 Step 5/9 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in 3014c2bf42ae  ---> 3d94fee0de64 Removing intermediate container 3014c2bf42ae Step 6/9 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in b5b9ef0f90c8  ---> c01f41498991 Removing intermediate container b5b9ef0f90c8 Step 7/9 : COPY sock-connector /usr/share/kubevirt/virt-launcher/ ---> 38379584d89b Removing intermediate container fee6d413b22e Step 8/9 : ENTRYPOINT /usr/bin/virt-launcher ---> Running in 6da0e3386e93 ---> c6b9d7036569 Removing intermediate container 6da0e3386e93 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-launcher" '' ---> Running in 39de9bd4865e ---> d68ad9690992 Removing intermediate container 39de9bd4865e Successfully built d68ad9690992 Sending build context to Docker daemon 41.68 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> d2b10336b221 Removing intermediate container 5e6e26d08af2 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Running in 0fadcaa7d40d ---> 0f634eb72cda Removing intermediate container 0fadcaa7d40d Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-handler" '' ---> Running in 96fb43d2d253 ---> 3e5dfc08c68e Removing intermediate container 96fb43d2d253 Successfully built 3e5dfc08c68e Sending build 
context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> 3cff23506e80 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> e94c5606b96b Step 5/8 : USER 1001 ---> Using cache ---> af16317199f5 Step 6/8 : COPY virt-api /usr/bin/virt-api ---> e9d182d23a49 Removing intermediate container ed4f8e50d49e Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in fcbb9bfcfd32 ---> 8bc1881a3574 Removing intermediate container fcbb9bfcfd32 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-api" '' ---> Running in 3ee5e6eab777 ---> 45ed709cd495 Removing intermediate container 3ee5e6eab777 Successfully built 45ed709cd495 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/7 : ENV container docker ---> Using cache ---> aed3ca4ac3a3 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> c7d0cf8fc982 Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> 0393e5ee0c37 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 23798f49dea3 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 628bfca144bf Successfully built 628bfca144bf Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/5 : ENV container docker ---> Using cache ---> aed3ca4ac3a3 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> d8c990eaf575 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "vm-killer" '' ---> Using cache ---> 2ed275c4bfd0 Successfully built 2ed275c4bfd0 Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 50fc79ebe51c Step 3/7 : ENV container docker ---> Using cache ---> b8e063496923 Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 8adb1572b35c Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 8c0c5a52e4df Step 6/7 : CMD /entry-point.sh ---> Using cache ---> 1a4b838e5dee Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "registry-disk-v1alpha" '' ---> Using cache ---> 7aa3fd44f8c9 Successfully built 7aa3fd44f8c9 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33040/kubevirt/registry-disk-v1alpha:devel ---> 7aa3fd44f8c9 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 5e0c3d37503b Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> 2acb8de4d71e Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 89f88bb54bf2 Successfully built 89f88bb54bf2 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33040/kubevirt/registry-disk-v1alpha:devel ---> 7aa3fd44f8c9 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 776bfb123af4 Step 
3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> 288211d2b493 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 0912477735f2 Successfully built 0912477735f2 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33040/kubevirt/registry-disk-v1alpha:devel ---> 7aa3fd44f8c9 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 776bfb123af4 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> c0c8be599bed Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> f4b34e404811 Successfully built f4b34e404811 Sending build context to Docker daemon 35.59 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> d74088d7a4fc Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> c8c857bf8d96 Step 5/8 : USER 1001 ---> Using cache ---> 36730a67b946 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> eb3e0ba7f583 Removing intermediate container 22b65e21c583 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in b3d2be923721 ---> 287889843305 Removing intermediate container b3d2be923721 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "subresource-access-test" '' ---> Running in 73a2085f0edd ---> 74733bda7394 Removing intermediate container 73a2085f0edd Successfully built 74733bda7394 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 84920e004a40 Step 3/9 : ENV container docker ---> Using cache ---> aed3ca4ac3a3 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> 6050b24a5d85 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0447d2178073 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 291db82d955f Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 793556477837 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> fd5c6e1f9461 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "winrmcli" '' ---> Using cache ---> 91d1be1bcbe4 Successfully built 91d1be1bcbe4 Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 71a8c548e503 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> e295adced057 Removing intermediate container 3d8bf39303e3 Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in 9620f169d6ce ---> 1ba87aa520da Removing intermediate container 9620f169d6ce Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Running in 5ea66e36112a ---> 0ccf3fef94cf Removing intermediate container 5ea66e36112a Successfully built 0ccf3fef94cf hack/build-docker.sh push The push refers to a repository [localhost:33040/kubevirt/virt-controller] 05ccacca1347: 
Preparing b2f5abdac324: Preparing 891e1e4ef82a: Preparing b2f5abdac324: Pushed 05ccacca1347: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:18ed1eeb8f3e30cfdf9c78f976b612ec763cc2c4e9a3e6ab8ae6999c43967d97 size: 949 The push refers to a repository [localhost:33040/kubevirt/virt-launcher] 3b413dd0736a: Preparing c68bf9e4f7b9: Preparing 52730d7f9ff5: Preparing cb53a3057506: Preparing 0b99c4111657: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 5eefb9960a36: Preparing 891e1e4ef82a: Preparing 5eefb9960a36: Waiting b83399358a92: Waiting 891e1e4ef82a: Waiting 186d8b3e4fd8: Waiting fa6154170bf5: Waiting da38cf808aa5: Waiting c68bf9e4f7b9: Pushed 3b413dd0736a: Pushed da38cf808aa5: Pushed b83399358a92: Pushed 52730d7f9ff5: Pushed fa6154170bf5: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 186d8b3e4fd8: Pushed cb53a3057506: Pushed 0b99c4111657: Pushed 5eefb9960a36: Pushed devel: digest: sha256:d8452ebd5316bcda5c284e54c9b405415791e6eed9dc56b78202ab35ab066aab size: 2620 The push refers to a repository [localhost:33040/kubevirt/virt-handler] 4c50f3548c13: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher 4c50f3548c13: Pushed devel: digest: sha256:21ee2a165b9bc2c25103a6a7391e1c6b32843e27fa6c8e1645f8fd9aeb0fd4f2 size: 741 The push refers to a repository [localhost:33040/kubevirt/virt-api] 9447c319a22c: Preparing afd1d781e4d1: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler afd1d781e4d1: Pushed 9447c319a22c: Pushed devel: digest: sha256:b8d617e3c83be0e53a949dcc98dd9c2b48c4fcda5361f99dbfa28affa905c699 size: 948 The push refers to a repository [localhost:33040/kubevirt/disks-images-provider] dc0875c44573: Preparing 8fc77a44094f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api dc0875c44573: Pushed 8fc77a44094f: Pushed devel: digest: sha256:d23d8d42ec6e15ae7ed6e778918aafb30b1527dcab703a192077860ecf796c74 size: 948 The push refers to a repository [localhost:33040/kubevirt/vm-killer] d1b69e768421: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider d1b69e768421: Pushed devel: digest: sha256:e18b0719b6c92415bd3a9d4e45278bb4a4f7bccefbd3fe8c958aad9b913bc32c size: 740 The push refers to a repository [localhost:33040/kubevirt/registry-disk-v1alpha] 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 2a15632f54d4: Pushed 91a924e03d7c: Pushed 25edbec0eaea: Pushed devel: digest: sha256:93dbd4b6c598eae77e68f8119e129d092b75cfe0573a46c653a4578391b54edd size: 948 The push refers to a repository [localhost:33040/kubevirt/cirros-registry-disk-demo] f287bddc58c9: Preparing 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 91a924e03d7c: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 2a15632f54d4: Mounted from kubevirt/registry-disk-v1alpha f287bddc58c9: Pushed devel: digest: sha256:d84ec6e1c3b1e790318b351a867571430b0f77022b609bf72c7edc11774869a2 size: 1160 The push refers to a repository [localhost:33040/kubevirt/fedora-cloud-registry-disk-demo] 191bddb21627: Preparing 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 91a924e03d7c: Mounted from kubevirt/cirros-registry-disk-demo 2a15632f54d4: Mounted from kubevirt/cirros-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 191bddb21627: Pushed devel: digest: 
sha256:721c5dc3b73e50b865b6d395e48884382c391509e18b4d77a3a27456a1eea65c size: 1161 The push refers to a repository [localhost:33040/kubevirt/alpine-registry-disk-demo] 8a362b640dc9: Preparing 2a15632f54d4: Preparing 91a924e03d7c: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo 2a15632f54d4: Mounted from kubevirt/fedora-cloud-registry-disk-demo 91a924e03d7c: Mounted from kubevirt/fedora-cloud-registry-disk-demo 8a362b640dc9: Pushed devel: digest: sha256:6c9639e0cb8ed67572ed78aad285cce752608f39802ce49856474162feae16f5 size: 1160 The push refers to a repository [localhost:33040/kubevirt/subresource-access-test] 1941f43ca6ed: Preparing 4052ce9d0aff: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 4052ce9d0aff: Pushed 1941f43ca6ed: Pushed devel: digest: sha256:879fb6f15221d122b9a4935991f4330a818f0e12937c0d41f2c60b65ba1dec31 size: 948 The push refers to a repository [localhost:33040/kubevirt/winrmcli] 64ccc7ac4271: Preparing 4242962b50c3: Preparing 0e374d8c733e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test 64ccc7ac4271: Pushed 0e374d8c733e: Pushed 4242962b50c3: Pushed devel: digest: sha256:7ba212e34e7bbac39ae9d54624462c338a98987d0eb9f59f8bb24b123847d8b4 size: 1165 The push refers to a repository [localhost:33040/kubevirt/example-hook-sidecar] 7ef89ca384e8: Preparing 39bae602f753: Preparing 7ef89ca384e8: Pushed 39bae602f753: Pushed devel: digest: sha256:096ede5c1c160b8f681e5b40c5f86ed7069ed709e8c9d6a745579a9c517999b5 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-151-gdc3f4c7 ++ KUBEVIRT_VERSION=v0.7.0-151-gdc3f4c7 + source 
cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33040/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... 
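The cleanup that follows is label-driven: for each target namespace (default, then kube-system) the script deletes every resource type KubeVirt may have created, selected by the kubevirt.io label, so nothing from a previous deployment survives into this run. A condensed sketch of that pattern, with the resource list abbreviated to the types visible in the log below:

  # Condensed form of the label-based cleanup below; purely illustrative.
  for ns in default kube-system; do
      for kind in apiservices deployment rs services validatingwebhookconfiguration \
                  secrets pv pvc ds customresourcedefinitions pods \
                  clusterrolebinding rolebinding roles clusterroles serviceaccounts; do
          kubectl -n "${ns}" delete "${kind}" -l kubevirt.io
      done
  done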
+ cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete 
clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n 
kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ 
APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-151-gdc3f4c7 ++ KUBEVIRT_VERSION=v0.7.0-151-gdc3f4c7 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33040/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... 
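Deployment, which starts here, is essentially two kubectl create calls plus OpenShift-specific privilege grants: the release manifest brings up virt-api, virt-controller and virt-handler together with the KubeVirt CRDs, the testing manifests add the host-path volumes and the disks-images-provider, and because the provider is os-3.10.0 the KubeVirt service accounts are added to the privileged SCC. Reduced to its essentials (paths shortened; the log drives the same adm subcommand through the provider's kubectl wrapper):

  # Essentials of the deploy step below; paths and wrappers simplified for readability.
  kubectl create -f _out/manifests/release/kubevirt.yaml
  kubectl create -f _out/manifests/testing -R
  for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
      oc adm policy add-scc-to-user privileged -z "${sa}" -n kube-system
  done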
+ [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n 
kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-g84sr 0/1 ContainerCreating 0 1s disks-images-provider-jj4df 0/1 ContainerCreating 0 1s virt-api-7d79764579-grzfd 0/1 ContainerCreating 0 3s virt-api-7d79764579-m89z6 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-mvwrb 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-rwnst 0/1 ContainerCreating 0 3s virt-handler-fkjw6 0/1 ContainerCreating 0 3s virt-handler-nf7mt 0/1 ContainerCreating 0 3s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
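The wait that follows is a two-stage poll with a 30-second sample and a 300-second budget: first loop until no pod in the namespace reports a phase other than Running, then loop until no container reports ready=false via a custom-columns query. A minimal sketch of the first stage, with the timeout handling simplified:

  # Simplified version of the polling loop below; fails the run if the budget is exceeded.
  timeout=300; sample=30; waited=0
  while kubectl get pods -n kube-system --no-headers | grep -v Running > /dev/null; do
      waited=$((waited + sample))
      [ "${waited}" -gt "${timeout}" ] && { echo 'timed out waiting for pods'; exit 1; }
      sleep "${sample}"
  done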
+ kubectl get pods -n kube-system --no-headers + grep -v Running + cluster/kubectl.sh get pods -n kube-system --no-headers disks-images-provider-g84sr 0/1 ContainerCreating 0 1s disks-images-provider-jj4df 0/1 ContainerCreating 0 1s virt-api-7d79764579-grzfd 0/1 ContainerCreating 0 3s virt-api-7d79764579-m89z6 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-mvwrb 0/1 ContainerCreating 0 3s virt-controller-7d57d96b65-rwnst 0/1 ContainerCreating 0 3s virt-handler-fkjw6 0/1 ContainerCreating 0 3s virt-handler-nf7mt 0/1 ContainerCreating 0 3s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n kube-system --no-headers + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n false ']' + echo 'Waiting for KubeVirt containers to become ready ...' Waiting for KubeVirt containers to become ready ... + kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + grep false + cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers false + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n kube-system + cluster/kubectl.sh get pods -n kube-system NAME READY STATUS RESTARTS AGE disks-images-provider-g84sr 1/1 Running 0 1m disks-images-provider-jj4df 1/1 Running 0 1m master-api-node01 1/1 Running 1 22d master-controllers-node01 1/1 Running 1 22d master-etcd-node01 1/1 Running 1 22d virt-api-7d79764579-grzfd 1/1 Running 1 1m virt-api-7d79764579-m89z6 1/1 Running 0 1m virt-controller-7d57d96b65-mvwrb 1/1 Running 0 1m virt-controller-7d57d96b65-rwnst 1/1 Running 0 1m virt-handler-fkjw6 1/1 Running 0 1m virt-handler-nf7mt 1/1 Running 0 1m + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n default --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n default --no-headers + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false + '[' -n '' ']' + kubectl get pods -n default + cluster/kubectl.sh get pods -n default NAME READY STATUS RESTARTS AGE docker-registry-1-rl562 1/1 Running 1 22d registry-console-1-rw9zf 1/1 Running 1 22d router-1-6cch9 1/1 Running 1 22d + kubectl version + cluster/kubectl.sh version oc v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 features: Basic-Auth GSSAPI Kerberos SPNEGO Server https://127.0.0.1:33037 openshift v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 + ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + [[ openshift-3.10-release =~ windows.* ]] + FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + make functest hack/dockerized "hack/build-func-tests.sh" 
sha256:ceba12cbc33e4e37a707840478a630db561e2427b78c8c9f9cd6d0b73276ab32 go version go1.10 linux/amd64 Waiting for rsyncd to be ready go version go1.10 linux/amd64 Compiling tests... compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1532942136 Will run 149 of 149 specs • 2018/07/30 05:16:29 read closing down: EOF ------------------------------ • [SLOW TEST:51.510 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 should have cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 ------------------------------ • [SLOW TEST:170.003 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 with injected ssh-key /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92 should have ssh-key under authorized keys /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93 ------------------------------ 2018/07/30 05:19:19 read closing down: EOF 2018/07/30 05:20:06 read closing down: EOF • [SLOW TEST:57.159 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userData source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118 should process provided cloud-init data 2018/07/30 05:20:16 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119 ------------------------------ 2018/07/30 05:21:04 read closing down: EOF • [SLOW TEST:48.115 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 should take user-data from k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162 ------------------------------ • [SLOW TEST:16.146 seconds] VNC /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54 with VNC connection /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62 should allow accessing the VNC device /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64 ------------------------------ •••••• ------------------------------ • [SLOW TEST:7.965 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:8.051 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin 
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given an vm /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:7.758 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi preset /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:7.806 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi replica set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • ------------------------------ • [SLOW TEST:17.596 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 should start it /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:76 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running level=error timestamp=2018-07-30T09:21:20.951164Z pos=subresource.go:85 component=virt-api msg="connection failed: command terminated with exit code 126" 2018/07/30 09:21:20 http: response.WriteHeader on hijacked connection level=info timestamp=2018-07-30T09:21:20.951592Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmid646r/console proto=HTTP/1.1 statusCode=500 contentLength=0 level=error timestamp=2018-07-30T09:21:20.951520Z pos=subresource.go:97 component=virt-api reason="read tcp 10.128.0.23:8443->10.128.0.1:51192: use of closed network connection" msg="error ecountered reading from websocket stream" 2018/07/30 09:21:21 http: TLS handshake error from 10.128.0.1:36842: EOF level=info timestamp=2018-07-30T09:21:23.428876Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:21:31 http: TLS handshake error from 10.128.0.1:36892: EOF 2018/07/30 09:21:41 http: TLS handshake error from 10.128.0.1:36938: EOF 2018/07/30 09:21:51 http: TLS handshake error from 10.128.0.1:36984: EOF level=info timestamp=2018-07-30T09:21:53.398026Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:22:01 http: TLS handshake error from 10.128.0.1:37034: EOF 2018/07/30 09:22:11 http: TLS handshake error from 10.128.0.1:37082: EOF 2018/07/30 09:22:21 http: TLS handshake error from 10.128.0.1:37128: EOF 2018/07/30 09:22:31 http: TLS handshake error from 10.128.0.1:37180: EOF 
2018/07/30 09:22:41 http: TLS handshake error from 10.128.0.1:37226: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T09:22:10.862013Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:22:10.901376Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:22:10.915058Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:22:16.960652Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:22:18 http: TLS handshake error from 10.129.0.1:50538: EOF level=info timestamp=2018-07-30T09:22:21.613455Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:22:23.687281Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:22:27.215977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:22:28 http: TLS handshake error from 10.129.0.1:50548: EOF level=info timestamp=2018-07-30T09:22:30.375049Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:22:37.308786Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:22:37.400682Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:22:37.402090Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 09:22:38 http: TLS handshake error from 10.129.0.1:50558: EOF level=info timestamp=2018-07-30T09:22:39.917837Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:21:26.372985Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T09:21:26.375565Z pos=vmi.go:157 component=virt-controller 
service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/default/testvm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ee99a208-93d9-11e8-82c1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance default/testvm" level=info timestamp=2018-07-30T09:21:26.381706Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=ee4ae430-93d9-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T09:21:26.381797Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=ee4ae430-93d9-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T09:21:26.381871Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T09:21:58.777129Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8p5h7 kind= uid=023ff09a-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:21:58.777266Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8p5h7 kind= uid=023ff09a-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:21:58.859123Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8p5h7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8p5h7" level=info timestamp=2018-07-30T09:21:59.061621Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi88hq5 kind= uid=026bd064-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:21:59.061716Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi88hq5 kind= uid=026bd064-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:21:59.176586Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi88hq5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi88hq5" level=info timestamp=2018-07-30T09:21:59.214101Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi88hq5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi88hq5" level=info timestamp=2018-07-30T09:22:16.655821Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:22:16.655949Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance 
as initialized" level=info timestamp=2018-07-30T09:22:16.727272Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigttcd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigttcd" Pod name: virt-handler-fkjw6 Pod phase: Running level=info timestamp=2018-07-30T09:14:14.924659Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-30T09:14:14.931000Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-30T09:14:14.932213Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-30T09:14:15.040585Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-30T09:14:15.106891Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-30T09:14:15.121677Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-nf7mt Pod phase: Running level=info timestamp=2018-07-30T09:22:16.504663Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi88hq5 kind= uid=026bd064-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:22:16.504774Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi88hq5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:22:16.505428Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi88hq5 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:22:16.505773Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi88hq5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:22:31.120443Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:22:31.856739Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:22:31.856961Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind=Domain uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:22:32.136071Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:22:32.136600Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind=Domain uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:22:32.161148Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:22:32.163437Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:22:32.164928Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:22:32.179550Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:22:32.179766Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:22:32.183717Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigttcd kind= uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmigttcd-rpkt7 Pod phase: Running • Failure [27.042 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 should attach virt-launcher to it [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:82 Timed out after 11.000s. Expected : to contain substring : Found PID for /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:92 ------------------------------ level=info timestamp=2018-07-30T09:22:16.762866Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmigttcd kind=VirtualMachineInstance uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmigttcd-rpkt7" level=info timestamp=2018-07-30T09:22:31.098332Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmigttcd kind=VirtualMachineInstance uid=0ce842df-93da-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmigttcd-rpkt7" level=info timestamp=2018-07-30T09:22:32.169223Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmigttcd kind=VirtualMachineInstance uid=0ce842df-93da-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:22:32.198446Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmigttcd kind=VirtualMachineInstance uid=0ce842df-93da-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
STEP: Getting virt-launcher logs ••••2018/07/30 05:23:35 read closing down: EOF ------------------------------ • [SLOW TEST:51.109 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:170 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Alpine as first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/07/30 05:24:01 read closing down: EOF • [SLOW TEST:26.056 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:170 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Cirros as first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:16.158 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:201 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:202 should retry starting the VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:203 ------------------------------ • [SLOW TEST:17.324 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:201 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:202 should log warning and proceed once the secret is there /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:233 ------------------------------ • [SLOW TEST:40.644 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-launcher crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:281 should be stopped and have Failed phase /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:282 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:24:31 http: TLS handshake error from 10.128.0.1:37764: EOF 2018/07/30 09:24:41 http: TLS handshake error from 10.128.0.1:37810: EOF 2018/07/30 09:24:51 http: TLS handshake error from 10.128.0.1:37856: EOF level=info timestamp=2018-07-30T09:24:53.397457Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:25:01 http: TLS handshake error from 10.128.0.1:37906: EOF 2018/07/30 09:25:11 http: TLS handshake error 
from 10.128.0.1:37954: EOF 2018/07/30 09:25:21 http: TLS handshake error from 10.128.0.1:38000: EOF level=info timestamp=2018-07-30T09:25:23.414018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:25:31 http: TLS handshake error from 10.128.0.1:38050: EOF 2018/07/30 09:25:41 http: TLS handshake error from 10.128.0.1:38096: EOF 2018/07/30 09:25:51 http: TLS handshake error from 10.128.0.1:38142: EOF 2018/07/30 09:26:01 http: TLS handshake error from 10.128.0.1:38192: EOF 2018/07/30 09:26:11 http: TLS handshake error from 10.128.0.1:38238: EOF 2018/07/30 09:26:21 http: TLS handshake error from 10.128.0.1:38284: EOF 2018/07/30 09:26:31 http: TLS handshake error from 10.128.0.1:38334: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T09:25:52.074505Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:25:53.702910Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:25:58.286790Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:25:58 http: TLS handshake error from 10.129.0.1:50764: EOF level=info timestamp=2018-07-30T09:26:00.852964Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:26:08.328625Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:26:08 http: TLS handshake error from 10.129.0.1:50774: EOF level=info timestamp=2018-07-30T09:26:10.335476Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:26:18.379018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:26:18 http: TLS handshake error from 10.129.0.1:50784: EOF level=info timestamp=2018-07-30T09:26:22.138578Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:26:23.696144Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:26:28.420672Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:26:28 http: TLS handshake error from 10.129.0.1:50794: EOF level=info timestamp=2018-07-30T09:26:30.901264Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:22:44.446294Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijxldp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijxldp" level=info timestamp=2018-07-30T09:23:35.464902Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit6k6f kind= uid=3be15cd0-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:23:35.465058Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit6k6f kind= uid=3be15cd0-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:23:35.520724Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit6k6f\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit6k6f" level=info timestamp=2018-07-30T09:23:35.540551Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit6k6f\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit6k6f" level=info timestamp=2018-07-30T09:24:01.515427Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilcvqc kind= uid=4b6896c6-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:24:01.515589Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilcvqc kind= uid=4b6896c6-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:24:17.675665Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhzrg kind= uid=550a5a1b-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:24:17.675795Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwhzrg kind= uid=550a5a1b-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:24:35.009612Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigdctx kind= uid=5f5ece5b-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:24:35.009989Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigdctx kind= uid=5f5ece5b-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:24:35.112359Z pos=vmi.go:157 
component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigdctx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigdctx" level=info timestamp=2018-07-30T09:25:15.645238Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6zwpj kind= uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:25:15.645390Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6zwpj kind= uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:25:15.717018Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6zwpj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6zwpj" Pod name: virt-handler-fkjw6 Pod phase: Running level=info timestamp=2018-07-30T09:14:14.924659Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-30T09:14:14.931000Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-30T09:14:14.932213Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-30T09:14:15.040585Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-30T09:14:15.106891Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-30T09:14:15.121677Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-handler-nf7mt Pod phase: Running level=info timestamp=2018-07-30T09:25:36.926457Z pos=virt-handler.go:87 component=virt-handler hostname=node02 level=info timestamp=2018-07-30T09:25:36.939942Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-30T09:25:36.942971Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-30T09:25:36.943139Z pos=cache.go:121 component=virt-handler msg="List domains from sock /var/run/kubevirt/sockets/kubevirt-test-default_testvmi6zwpj_sock" level=info timestamp=2018-07-30T09:25:36.980927Z pos=cache.go:121 component=virt-handler msg="List domains from sock /var/run/kubevirt/sockets/kubevirt-test-default_testvmilcvqc_sock" level=error timestamp=2018-07-30T09:25:36.981204Z pos=cache.go:124 component=virt-handler reason="dial unix /var/run/kubevirt/sockets/kubevirt-test-default_testvmilcvqc_sock: connect: connection refused" msg="failed to connect to cmd client socket" level=info timestamp=2018-07-30T09:25:36.981916Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi6zwpj kind=Domain uid= msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:25:37.041271Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-30T09:25:37.055833Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-30T09:25:37.063537Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-30T09:25:37.145257Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:25:37.160435Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi6zwpj-hr6ms Pod phase: Running Pod name: vmi-killerfh8q4 Pod phase: Succeeded Pod name: vmi-killerh544z Pod phase: Succeeded • Failure [81.341 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-handler crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:305 should recover and continue management [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:306 Expected : Running to equal : Failed /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:332 ------------------------------ level=info timestamp=2018-07-30T09:25:15.690686Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6zwpj-hr6ms" level=info timestamp=2018-07-30T09:25:30.518363Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi6zwpj-hr6ms" level=info timestamp=2018-07-30T09:25:31.507840Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:25:31.524666Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
STEP: Crashing the virt-handler STEP: Killing the VirtualMachineInstance level=info timestamp=2018-07-30T09:25:36.133394Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6zwpj-hr6ms" level=info timestamp=2018-07-30T09:25:36.133482Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi6zwpj-hr6ms" level=info timestamp=2018-07-30T09:25:36.133814Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:25:36.133971Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." level=info timestamp=2018-07-30T09:25:37.143393Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmi6zwpj kind=VirtualMachineInstance uid=7797fd6d-93da-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." STEP: Checking that VirtualMachineInstance has 'Failed' phase • [SLOW TEST:82.459 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-handler is responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:336 should indicate that a node is ready for vmis /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:337 ------------------------------ • [SLOW TEST:56.267 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-handler is not responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:367 the node controller should react /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:406 ------------------------------ • [SLOW TEST:18.624 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with node tainted /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:459 the vmi with tolerations should be scheduled /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:481 ------------------------------ • ------------------------------ S [SKIPPING] [0.240 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:531 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-default [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Skip log query tests for JENKINS ci test environment /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:536 ------------------------------ S [SKIPPING] [0.086 seconds] VMIlifecycle 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:531 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-alternative [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Skip log query tests for JENKINS ci test environment /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:536 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.097 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should enable emulation in virt-launcher [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:604 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.081 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should be reflected in domain XML [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:641 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.075 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should request a TUN device but not KVM [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:685 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ •••• ------------------------------ • [SLOW TEST:38.085 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Delete a VirtualMachineInstance's Pod /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837 should result in the VirtualMachineInstance moving to a finalized state /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:838 ------------------------------ • [SLOW TEST:26.966 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869 with an active pod. 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:870 should result in pod being terminated /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:871 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:29:01 http: TLS handshake error from 10.128.0.1:39058: EOF 2018/07/30 09:29:11 http: TLS handshake error from 10.128.0.1:39104: EOF 2018/07/30 09:29:21 http: TLS handshake error from 10.128.0.1:39154: EOF 2018/07/30 09:29:31 http: TLS handshake error from 10.128.0.1:39206: EOF 2018/07/30 09:29:41 http: TLS handshake error from 10.128.0.1:39252: EOF 2018/07/30 09:29:51 http: TLS handshake error from 10.128.0.1:39298: EOF level=info timestamp=2018-07-30T09:29:53.423165Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:30:01 http: TLS handshake error from 10.128.0.1:39348: EOF 2018/07/30 09:30:11 http: TLS handshake error from 10.128.0.1:39394: EOF 2018/07/30 09:30:21 http: TLS handshake error from 10.128.0.1:39440: EOF 2018/07/30 09:30:31 http: TLS handshake error from 10.128.0.1:39490: EOF 2018/07/30 09:30:41 http: TLS handshake error from 10.128.0.1:39536: EOF 2018/07/30 09:30:51 http: TLS handshake error from 10.128.0.1:39582: EOF level=info timestamp=2018-07-30T09:30:53.379377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:31:01 http: TLS handshake error from 10.128.0.1:39632: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running 2018/07/30 09:30:28 http: TLS handshake error from 10.129.0.1:51038: EOF level=info timestamp=2018-07-30T09:30:30.105030Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:30:31.377942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:30:37.703048Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:30:37.704187Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 09:30:38 http: TLS handshake error from 10.129.0.1:51048: EOF level=info timestamp=2018-07-30T09:30:40.159917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:30:41.116943Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:30:48 http: TLS handshake error from 10.129.0.1:51058: EOF level=info timestamp=2018-07-30T09:30:50.211412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:30:52.777014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:30:58 http: TLS handshake error from 10.129.0.1:51068: EOF level=info timestamp=2018-07-30T09:31:00.249828Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:31:01.433924Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:31:08 http: TLS handshake error from 10.129.0.1:51078: EOF Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:29:16.372851Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6fvn kind= uid=07141dc7-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:29:16.626354Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir5psj kind= uid=073a8c1e-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:29:16.626503Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir5psj kind= uid=073a8c1e-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:29:16.677398Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir5psj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir5psj" level=info timestamp=2018-07-30T09:29:16.695900Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir5psj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir5psj" level=info timestamp=2018-07-30T09:29:17.144925Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilqtsm kind= uid=0789d03e-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:29:17.145097Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilqtsm kind= uid=0789d03e-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:29:17.222697Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilqtsm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing 
VirtualMachineInstance kubevirt-test-default/testvmilqtsm" level=info timestamp=2018-07-30T09:29:17.377615Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilqtsm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilqtsm" level=info timestamp=2018-07-30T09:29:55.223291Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:29:55.223427Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:29:55.275884Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8nvh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8nvh" level=info timestamp=2018-07-30T09:29:55.292349Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8nvh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8nvh" level=info timestamp=2018-07-30T09:30:22.355399Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:30:22.355519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T09:30:10.867962Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind=Domain uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:30:10.885653Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:30:10.946071Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:10.946688Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:30:10.952269Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:10.981133Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." 
level=info timestamp=2018-07-30T09:30:10.981428Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:10.981959Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmil8nvh" level=info timestamp=2018-07-30T09:30:11.030767Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:22.146400Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:30:22.146566Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:22.147110Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:22.177055Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:30:22.177144Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:22.177432Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:29:14.243628Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:38.384715Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:30:39.075619Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:30:39.077099Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=Domain uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:30:39.376165Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:30:39.376355Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=Domain uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:30:39.388167Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:30:39.390811Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:30:39.402130Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:30:39.418500Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.419316Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:30:39.425758Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.516496Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:30:39.516572Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:39.516771Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." • Failure [47.965 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869 with grace period greater than 0 /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:894 should run graceful shutdown [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:895 Timed out after 30.001s. Expected : level=info timestamp=2018-07-30T09:29:12.086202Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:29:13.257114Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:29:13.258464Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind=Domain uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:29:13.685887Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:29:13.687041Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind=Domain uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:29:13.711357Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:29:13.711748Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:29:13.717573Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:29:13.752104Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:29:13.753593Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:29:13.758400Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:29:14.052613Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:29:14.052786Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:29:14.056678Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmip7dlb" level=info timestamp=2018-07-30T09:29:14.188076Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind= uid=faafa0f0-93da-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:29:14.243243Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:29:14.243375Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:29:14.243628Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmip7dlb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:30:38.384715Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:30:39.075619Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:30:39.077099Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=Domain uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:30:39.376165Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:30:39.376355Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=Domain uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:30:39.388167Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.390811Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:30:39.402130Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:30:39.418500Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.419316Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:30:39.425758Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.516496Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:30:39.516572Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:39.516771Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
to contain substring : Signaled graceful shutdown for testvmicqq4f /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:933 ------------------------------ STEP: Setting a VirtualMachineInstance termination grace period to 5 STEP: Creating the VirtualMachineInstance level=info timestamp=2018-07-30T09:30:22.465677Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmicqq4f-d2xcc" level=info timestamp=2018-07-30T09:30:38.659878Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmicqq4f-d2xcc" level=info timestamp=2018-07-30T09:30:39.701619Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:30:39.707429Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." STEP: Deleting the VirtualMachineInstance level=info timestamp=2018-07-30T09:30:39.854961Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmicqq4f-d2xcc" level=info timestamp=2018-07-30T09:30:39.855062Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmicqq4f-d2xcc" level=info timestamp=2018-07-30T09:30:39.859048Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:30:39.859131Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
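For context on the failure above: the test sets the VMI's termination grace period (spec.terminationGracePeriodSeconds) to 5, deletes the VMI, and then expects the virt-handler log to contain "Signaled graceful shutdown for testvmicqq4f" within 30 seconds; in this run the handler only ever logged "Processing shutdown." before the timeout. A rough Gomega sketch of that expectation follows; fetchHandlerLog is a hypothetical placeholder for however the suite collects virt-handler output, not the repository's actual helper.

package vmitests

import (
	"testing"
	"time"

	"github.com/onsi/gomega"
)

// fetchHandlerLog is a hypothetical placeholder: the real suite reads the
// virt-handler pod's log for the node running the VMI.
func fetchHandlerLog() string { return "" }

func TestGracefulShutdownIsSignalled(t *testing.T) {
	g := gomega.NewWithT(t)

	// The assertion that timed out above: within 30s the handler log should
	// mention the graceful-shutdown signal for the deleted VMI.
	g.Eventually(fetchHandlerLog, 30*time.Second, 1*time.Second).
		Should(gomega.ContainSubstring("Signaled graceful shutdown for testvmicqq4f"))
}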
level=info timestamp=2018-07-30T09:30:39.942349Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Deleted virtual machine pod virt-launcher-testvmicqq4f-d2xcc" level=info timestamp=2018-07-30T09:30:39.942407Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance stopping" STEP: Checking that virt-handler logs VirtualMachineInstance graceful shutdown Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:30:41 http: TLS handshake error from 10.128.0.1:39536: EOF 2018/07/30 09:30:51 http: TLS handshake error from 10.128.0.1:39582: EOF level=info timestamp=2018-07-30T09:30:53.379377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:31:01 http: TLS handshake error from 10.128.0.1:39632: EOF 2018/07/30 09:31:11 http: TLS handshake error from 10.128.0.1:39678: EOF 2018/07/30 09:31:21 http: TLS handshake error from 10.128.0.1:39726: EOF level=info timestamp=2018-07-30T09:31:23.402785Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:31:31 http: TLS handshake error from 10.128.0.1:39776: EOF 2018/07/30 09:31:41 http: TLS handshake error from 10.128.0.1:39822: EOF 2018/07/30 09:31:51 http: TLS handshake error from 10.128.0.1:39868: EOF 2018/07/30 09:32:01 http: TLS handshake error from 10.128.0.1:39918: EOF 2018/07/30 09:32:11 http: TLS handshake error from 10.128.0.1:39964: EOF 2018/07/30 09:32:21 http: TLS handshake error from 10.128.0.1:40010: EOF level=info timestamp=2018-07-30T09:32:23.434707Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:32:31 http: TLS handshake error from 10.128.0.1:40060: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T09:31:50.491451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:31:52.935491Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:31:53.745700Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:31:58 http: TLS handshake error from 10.129.0.1:51130: EOF level=info timestamp=2018-07-30T09:32:00.543731Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:32:01.533924Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:32:08 
http: TLS handshake error from 10.129.0.1:51140: EOF level=info timestamp=2018-07-30T09:32:10.592588Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:32:11.266902Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:32:18 http: TLS handshake error from 10.129.0.1:51150: EOF level=info timestamp=2018-07-30T09:32:20.645987Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:32:22.982180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:32:28 http: TLS handshake error from 10.129.0.1:51160: EOF level=info timestamp=2018-07-30T09:32:30.718672Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:32:31.575837Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:29:16.695900Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir5psj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir5psj" level=info timestamp=2018-07-30T09:29:17.144925Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilqtsm kind= uid=0789d03e-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:29:17.145097Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilqtsm kind= uid=0789d03e-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:29:17.222697Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilqtsm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilqtsm" level=info timestamp=2018-07-30T09:29:17.377615Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilqtsm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilqtsm" level=info 
timestamp=2018-07-30T09:29:55.223291Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:29:55.223427Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:29:55.275884Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8nvh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8nvh" level=info timestamp=2018-07-30T09:29:55.292349Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8nvh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8nvh" level=info timestamp=2018-07-30T09:30:22.355399Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:30:22.355519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:31:10.309388Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:31:10.309502Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:31:10.467146Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmignpvs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmignpvs" level=info timestamp=2018-07-30T09:31:10.489567Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmignpvs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmignpvs" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T09:30:22.147110Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:22.177055Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-07-30T09:30:22.177144Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:22.177432Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil8nvh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:31:24.228582Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:31:24.919768Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:31:24.921891Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind=Domain uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:31:25.215035Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:31:25.215236Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind=Domain uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:31:25.234119Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:31:25.236747Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:31:25.236836Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:31:25.262465Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:31:25.262542Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:31:25.266024Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:30:39.388167Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.390811Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:30:39.402130Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:30:39.418500Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:30:39.419316Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:30:39.425758Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.516496Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:30:39.516572Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:39.516771Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:31:09.808389Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:31:09.809133Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:31:09.809722Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:31:09.826199Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:31:09.826278Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:31:09.826982Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
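All of the component logs in these dumps share the same single-line key=value format (level, timestamp, pos, component, msg, ...). When sifting through output like this, it can help to split each entry into its fields; the sketch below does that with a small regular expression. It is only a reading aid for these logs, not part of KubeVirt.

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// fieldRe matches key=value pairs where the value is either a bare token or a
// double-quoted string (as in msg="Processing shutdown.").
var fieldRe = regexp.MustCompile(`(\w+)=("(?:[^"\\]|\\.)*"|\S+)`)

// parseEntry turns one log line into a map of its fields.
func parseEntry(line string) map[string]string {
	fields := map[string]string{}
	for _, m := range fieldRe.FindAllStringSubmatch(line, -1) {
		key, val := m[1], m[2]
		if unquoted, err := strconv.Unquote(val); err == nil {
			val = unquoted
		}
		fields[key] = val
	}
	return fields
}

func main() {
	line := `level=info timestamp=2018-07-30T09:30:22.177144Z pos=vm.go:383 component=virt-handler msg="Processing shutdown."`
	entry := parseEntry(line)
	fmt.Println(entry["component"], entry["msg"])
}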
Pod name: virt-launcher-testvmignpvs-wxsx7 Pod phase: Running Pod name: vmi-killer5qd2p Pod phase: Succeeded • Failure [85.638 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Killed VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:946 should be in Failed phase [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:947 Expected : Running to equal : Failed /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:970 ------------------------------ STEP: Starting a VirtualMachineInstance level=info timestamp=2018-07-30T09:31:10.417869Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmignpvs-wxsx7" level=info timestamp=2018-07-30T09:31:24.201057Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmignpvs-wxsx7" level=info timestamp=2018-07-30T09:31:25.231049Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:31:25.246847Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." STEP: Killing the VirtualMachineInstance level=info timestamp=2018-07-30T09:31:35.304642Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmignpvs-wxsx7" level=info timestamp=2018-07-30T09:31:35.304724Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmignpvs-wxsx7" level=info timestamp=2018-07-30T09:31:35.305040Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:31:35.305115Z pos=utils.go:254 component=tests namespace=kubevirt-test-default name=testvmignpvs kind=VirtualMachineInstance uid=4afd183a-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
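The failure above is the inverse problem: after the vmi-killer pod killed the launcher for testvmignpvs, the VMI was expected to move to the Failed phase but was still reported as Running when the check gave up. In rough outline the check amounts to the sketch below; currentPhase is a hypothetical stand-in for reading status.phase from the API server, and the polling window is illustrative rather than the suite's actual value.

package vmitests

import (
	"testing"
	"time"

	"github.com/onsi/gomega"
)

// currentPhase is a hypothetical placeholder for fetching the VMI's
// status.phase from the cluster.
func currentPhase() string { return "Running" }

func TestKilledVMIEntersFailedPhase(t *testing.T) {
	g := gomega.NewWithT(t)

	// Poll the phase until it becomes Failed; in the run above it stayed
	// Running for the whole window, hence "Expected Running to equal Failed".
	g.Eventually(currentPhase, 60*time.Second, 2*time.Second).
		Should(gomega.Equal("Failed"))
}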
STEP: Checking that the VirtualMachineInstance has 'Failed' phase • [SLOW TEST:81.254 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Killed VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:946 should be left alone by virt-handler /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:974 ------------------------------ •••• Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running level=info timestamp=2018-07-30T09:34:26.359173Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 09:34:31 http: TLS handshake error from 10.128.0.1:40642: EOF 2018/07/30 09:34:41 http: TLS handshake error from 10.128.0.1:40688: EOF 2018/07/30 09:34:51 http: TLS handshake error from 10.128.0.1:40734: EOF level=info timestamp=2018-07-30T09:34:53.432007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:35:01 http: TLS handshake error from 10.128.0.1:40784: EOF level=error timestamp=2018-07-30T09:35:09.713242Z pos=subresource.go:85 component=virt-api msg= 2018/07/30 09:35:09 http: response.WriteHeader on hijacked connection level=info timestamp=2018-07-30T09:35:09.713945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmidjnqk/console proto=HTTP/1.1 statusCode=200 contentLength=0 level=error timestamp=2018-07-30T09:35:09.713865Z pos=subresource.go:97 component=virt-api reason="read tcp 10.128.0.23:8443->10.128.0.1:54976: use of closed network connection" msg="error ecountered reading from websocket stream" 2018/07/30 09:35:11 http: TLS handshake error from 10.128.0.1:40830: EOF 2018/07/30 09:35:21 http: TLS handshake error from 10.128.0.1:40876: EOF level=info timestamp=2018-07-30T09:35:23.406634Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:35:31 http: TLS handshake error from 10.128.0.1:40926: EOF 2018/07/30 09:35:41 http: TLS handshake error from 10.128.0.1:40972: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T09:35:01.843577Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:35:08 http: TLS handshake error from 10.129.0.1:51330: EOF level=info timestamp=2018-07-30T09:35:11.539304Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:35:11.593019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:35:18 http: TLS handshake error from 10.129.0.1:51340: EOF level=info timestamp=2018-07-30T09:35:21.595378Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 
statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:35:23.605466Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:35:28 http: TLS handshake error from 10.129.0.1:51350: EOF level=info timestamp=2018-07-30T09:35:31.644550Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:35:31.907797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:35:37.245085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:35:37.246348Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 09:35:38 http: TLS handshake error from 10.129.0.1:51360: EOF level=info timestamp=2018-07-30T09:35:41.646596Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:35:41.696572Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:29:55.223427Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil8nvh kind= uid=1e3c328b-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:29:55.275884Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8nvh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8nvh" level=info timestamp=2018-07-30T09:29:55.292349Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmil8nvh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmil8nvh" level=info timestamp=2018-07-30T09:30:22.355399Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:30:22.355519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-07-30T09:31:10.309388Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:31:10.309502Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmignpvs kind= uid=4afd183a-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:31:10.467146Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmignpvs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmignpvs" level=info timestamp=2018-07-30T09:31:10.489567Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmignpvs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmignpvs" level=info timestamp=2018-07-30T09:32:35.944972Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxnld kind= uid=7e0839f4-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:32:35.945138Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxnld kind= uid=7e0839f4-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:32:36.023054Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxnld\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxnld" level=info timestamp=2018-07-30T09:32:36.049282Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxnld\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxnld" level=info timestamp=2018-07-30T09:34:11.279552Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:34:11.287590Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T09:34:25.754138Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:34:25.755090Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind=Domain uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Domain is in state Shutoff reason Unknown" level=info timestamp=2018-07-30T09:34:26.551098Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info 
timestamp=2018-07-30T09:34:26.551299Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind=Domain uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:34:26.557197Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:34:26.557264Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:34:26.578199Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:34:26.581241Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:34:26.582450Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="No update processing required" level=error timestamp=2018-07-30T09:34:26.589279Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidjnqk\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-30T09:34:26.589661Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidjnqk\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidjnqk" level=info timestamp=2018-07-30T09:34:26.589841Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:34:26.592928Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:34:26.594963Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:34:26.598345Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidjnqk kind= uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:30:39.388167Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:30:39.390811Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:30:39.402130Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:30:39.418500Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.419316Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:30:39.425758Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:30:39.516496Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:30:39.516572Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:30:39.516771Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:31:09.808389Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:31:09.809133Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:31:09.809722Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind= uid=2e67f7b3-93db-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:31:09.826199Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:31:09.826278Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:31:09.826982Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicqq4f kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmidjnqk-zcsfg Pod phase: Running 2018/07/30 05:35:45 read closing down: EOF ------------------------------ • Failure [94.556 seconds] Health Monitoring /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37 A VirtualMachineInstance with a watchdog device /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56 should be shut down when the watchdog expires [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57 Timed out after 40.005s. 
Expected : Running to equal : Failed /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:85 ------------------------------ STEP: Starting a VirtualMachineInstance level=info timestamp=2018-07-30T09:34:11.336188Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmidjnqk kind=VirtualMachineInstance uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmidjnqk-zcsfg" level=info timestamp=2018-07-30T09:34:25.502388Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmidjnqk kind=VirtualMachineInstance uid=b6db2729-93db-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmidjnqk-zcsfg" level=info timestamp=2018-07-30T09:34:26.551570Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmidjnqk kind=VirtualMachineInstance uid=b6db2729-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:34:26.568807Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmidjnqk kind=VirtualMachineInstance uid=b6db2729-93db-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." STEP: Expecting the VirtualMachineInstance console STEP: Killing the watchdog device STEP: Checking that the VirtualMachineInstance has Failed status • [SLOW TEST:17.784 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should successfully start with hook sidecar annotation /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 ------------------------------ • [SLOW TEST:17.269 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 ------------------------------ • [SLOW TEST:19.403 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should update domain XML with SM BIOS properties /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83 ------------------------------ • ------------------------------ • [SLOW TEST:11.839 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:18.581 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should update readyReplicas once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157 ------------------------------ • [SLOW TEST:15.686 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove VMIs once it 
is marked for deletion /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:169 ------------------------------ • ------------------------------ • [SLOW TEST:5.487 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should not scale when paused and scale when resume /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223 ------------------------------ • [SLOW TEST:5.559 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove the finished VM /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279 ------------------------------ • [SLOW TEST:53.360 seconds] 2018/07/30 05:38:38 read closing down: EOF Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a cirros image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67 should return that we are running cirros /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68 ------------------------------ 2018/07/30 05:39:32 read closing down: EOF • [SLOW TEST:54.231 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a fedora image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77 should return that we are running fedora /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78 ------------------------------ 2018/07/30 05:40:18 read closing down: EOF 2018/07/30 05:40:20 read closing down: EOF 2018/07/30 05:40:21 read closing down: EOF 2018/07/30 05:40:21 read closing down: EOF • [SLOW TEST:49.345 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should be able to reconnect to console multiple times 2018/07/30 05:40:22 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87 ------------------------------ • [SLOW TEST:18.282 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should wait until the virtual machine is in running state and return a stream interface /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103 ------------------------------ • [SLOW TEST:30.222 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should fail waiting for the virtual machine instance to be running /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111 ------------------------------ • [SLOW TEST:30.226 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should fail waiting for the expecter /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134 
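The serial-console cases above (cirros, fedora, repeated reconnects) all reduce to opening the console stream and reading until an expected prompt appears. The sketch below only illustrates the reconnect-several-times idea; openConsole is a hypothetical stand-in for however the console stream is obtained (for example via virtctl or the console subresource), not KubeVirt's actual API.

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// openConsole is a hypothetical placeholder that would return a stream
// attached to the VMI's serial console.
func openConsole(vmi string) (io.ReadWriteCloser, error) {
	return nil, fmt.Errorf("not wired up in this sketch")
}

// expectPrompt reads console output until a line contains the wanted
// substring (for a cirros image, typically "login:").
func expectPrompt(c io.Reader, want string) error {
	scanner := bufio.NewScanner(c)
	for scanner.Scan() {
		if strings.Contains(scanner.Text(), want) {
			return nil
		}
	}
	return fmt.Errorf("prompt %q not seen", want)
}

func main() {
	// "Reconnect to console multiple times": open, check the prompt, close,
	// and repeat, which is what the test above exercises.
	for i := 0; i < 5; i++ {
		conn, err := openConsole("testvmi")
		if err != nil {
			fmt.Println("attempt", i, "failed:", err)
			continue
		}
		if err := expectPrompt(conn, "login:"); err != nil {
			fmt.Println("attempt", i, ":", err)
		}
		conn.Close()
	}
}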
------------------------------ ••••••••••• 2018/07/30 05:42:36 read closing down: EOF ------------------------------ • [SLOW TEST:50.289 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/07/30 05:43:26 read closing down: EOF • [SLOW TEST:49.985 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:43:51 http: TLS handshake error from 10.128.0.1:43638: EOF level=info timestamp=2018-07-30T09:43:54.167023Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:44:01 http: TLS handshake error from 10.128.0.1:43688: EOF 2018/07/30 09:44:11 http: TLS handshake error from 10.128.0.1:43734: EOF 2018/07/30 09:44:21 http: TLS handshake error from 10.128.0.1:43780: EOF level=info timestamp=2018-07-30T09:44:24.162595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:44:31 http: TLS handshake error from 10.128.0.1:43830: EOF 2018/07/30 09:44:41 http: TLS handshake error from 10.128.0.1:43876: EOF 2018/07/30 09:44:51 http: TLS handshake error from 10.128.0.1:43922: EOF level=info timestamp=2018-07-30T09:44:54.197726Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:45:01 http: TLS handshake error from 10.128.0.1:43972: EOF 2018/07/30 09:45:11 http: TLS handshake error from 10.128.0.1:44018: EOF 2018/07/30 09:45:21 http: TLS handshake error from 10.128.0.1:44064: EOF level=info timestamp=2018-07-30T09:45:24.183600Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:45:31 http: TLS handshake error from 10.128.0.1:44114: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running 2018/07/30 09:44:58 http: TLS handshake error from 10.129.0.1:51944: EOF level=info timestamp=2018-07-30T09:45:03.313775Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:45:08.545368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:45:08 http: TLS handshake error from 10.129.0.1:51954: EOF level=info timestamp=2018-07-30T09:45:12.824859Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:45:18.590843Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:45:18 http: TLS handshake error from 10.129.0.1:51964: EOF level=info timestamp=2018-07-30T09:45:28.655155Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:45:28.932645Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:45:28 http: TLS handshake error from 10.129.0.1:51974: EOF level=info timestamp=2018-07-30T09:45:33.361841Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:45:38.301172Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:45:38.302822Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:45:38.702177Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:45:38 http: TLS handshake error from 10.129.0.1:51984: EOF Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:41:44.693362Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid6ft7\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmid6ft7, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c4f9633e-93dc-11e8-82c1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid6ft7" level=info timestamp=2018-07-30T09:41:45.666578Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:41:45.666807Z pos=preset.go:255 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="VirtualMachineInstancePreset test-conflict-p6946 matches 
VirtualMachineInstance" level=info timestamp=2018-07-30T09:41:45.666897Z pos=preset.go:255 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="VirtualMachineInstancePreset test-memory-g28hd matches VirtualMachineInstance" level=error timestamp=2018-07-30T09:41:45.668352Z pos=preset.go:415 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="VirtualMachinePresets cannot be applied due to conflicts: presets 'test-memory-g28hd' and 'test-conflict-p6946' conflict: spec.resources.requests[memory]: {{128 6} {} 128M DecimalSI} != {{256 6} {} 256M DecimalSI}" level=warning timestamp=2018-07-30T09:41:45.668947Z pos=preset.go:157 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as failed" level=info timestamp=2018-07-30T09:41:45.669079Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:41:45.970532Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizz6hg kind= uid=c5dbf36f-93dc-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:41:45.970802Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizz6hg kind= uid=c5dbf36f-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:41:46.127518Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizz6hg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizz6hg" level=info timestamp=2018-07-30T09:41:46.188773Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizz6hg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizz6hg" level=info timestamp=2018-07-30T09:42:36.364134Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih4sbs kind= uid=e3e9148b-93dc-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:42:36.364292Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih4sbs kind= uid=e3e9148b-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:43:26.352519Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:43:26.352723Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-2hh2l Pod phase: Running level=info 
timestamp=2018-07-30T09:43:40.490461Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:43:41.146034Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:43:41.146285Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind=Domain uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:43:41.388848Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:43:41.390596Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind=Domain uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:43:41.410909Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:43:41.417006Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:43:41.417851Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:43:41.440434Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:43:41.440850Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:43:41.445761Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:43:41.472971Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:43:41.473439Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:43:41.473948Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmiwjjwg" level=info timestamp=2018-07-30T09:43:41.550616Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:38:02.603760Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:38:02.603828Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:38:02.624223Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.624696Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.639624Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.640340Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:38:02.642639Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.645617Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.264389Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:38:38.264703Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.265508Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvminzqq8" level=info timestamp=2018-07-30T09:38:38.312998Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.368992Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:38:38.369076Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.369267Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
• Failure [135.446 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 120.000s. Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting and stopping the VirtualMachineInstance number of times STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T09:43:26.502901Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmiwjjwg kind=VirtualMachineInstance uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiwjjwg-n99p2" level=info timestamp=2018-07-30T09:43:40.466799Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmiwjjwg kind=VirtualMachineInstance uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiwjjwg-n99p2" level=info timestamp=2018-07-30T09:43:41.406944Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmiwjjwg kind=VirtualMachineInstance uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:43:41.427030Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmiwjjwg kind=VirtualMachineInstance uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
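The failure above (and the CDRom variant that follows) times out the same way: the helper at tests/utils.go:1065 keeps polling the VMI and gives up after 120 seconds, which Gomega reports as the generic "Expected : false to be true" message. Below is a minimal sketch of that style of wait, assuming a hypothetical vmiIsRunning helper that queries the cluster; the helper, the 120s budget, and the 2s poll interval are illustrative, not the suite's actual utils.go implementation.

// Minimal sketch, assuming a hypothetical vmiIsRunning helper: this is the
// shape of wait that produces the "Timed out after 120.000s ... Expected
// false to be true" failures above, not the suite's actual utils.go code.
package vmiwait

import (
	"time"

	. "github.com/onsi/gomega"
)

// vmiIsRunning would query the cluster (for example through the KubeVirt
// client) and report whether the VirtualMachineInstance has reached the
// Running phase. Stubbed here so the sketch stays self-contained.
func vmiIsRunning(namespace, name string) bool {
	return false
}

// waitForVMIStart is meant to be called from inside a Ginkgo test; when the
// VMI never reaches Running within the deadline, Gomega fails the test with
// exactly the kind of timeout message shown in the log above.
func waitForVMIStart(namespace, name string) {
	EventuallyWithOffset(1, func() bool {
		return vmiIsRunning(namespace, name)
	}, 120*time.Second, 2*time.Second).Should(BeTrue(), "VMI %s/%s should reach Running", namespace, name)
}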
Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:46:01 http: TLS handshake error from 10.128.0.1:44256: EOF 2018/07/30 09:46:11 http: TLS handshake error from 10.128.0.1:44302: EOF 2018/07/30 09:46:21 http: TLS handshake error from 10.128.0.1:44350: EOF 2018/07/30 09:46:31 http: TLS handshake error from 10.128.0.1:44400: EOF 2018/07/30 09:46:41 http: TLS handshake error from 10.128.0.1:44446: EOF 2018/07/30 09:46:51 http: TLS handshake error from 10.128.0.1:44492: EOF level=info timestamp=2018-07-30T09:46:54.207573Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:47:01 http: TLS handshake error from 10.128.0.1:44542: EOF 2018/07/30 09:47:11 http: TLS handshake error from 10.128.0.1:44588: EOF level=info timestamp=2018-07-30T09:47:21.248338Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:47:21.271356Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:47:21 http: TLS handshake error from 10.128.0.1:44642: EOF 2018/07/30 09:47:31 http: TLS handshake error from 10.128.0.1:44692: EOF 2018/07/30 09:47:41 http: TLS handshake error from 10.128.0.1:44738: EOF 2018/07/30 09:47:51 http: TLS handshake error from 10.128.0.1:44784: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running 2018/07/30 09:47:18 http: TLS handshake error from 10.129.0.1:52086: EOF level=info timestamp=2018-07-30T09:47:19.211304Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:47:24.504691Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:47:28 http: TLS handshake error from 10.129.0.1:52096: EOF level=info timestamp=2018-07-30T09:47:29.367197Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:47:29.444090Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:47:33.778205Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:47:38.068605Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:47:38.070371Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 09:47:38 http: TLS handshake error from 10.129.0.1:52106: EOF level=info timestamp=2018-07-30T09:47:39.465577Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:47:43.097235Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:47:48 http: TLS handshake error from 10.129.0.1:52116: EOF level=info timestamp=2018-07-30T09:47:49.519931Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:47:54.487850Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:41:45.666807Z pos=preset.go:255 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="VirtualMachineInstancePreset test-conflict-p6946 matches VirtualMachineInstance" level=info timestamp=2018-07-30T09:41:45.666897Z pos=preset.go:255 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="VirtualMachineInstancePreset test-memory-g28hd matches VirtualMachineInstance" level=error timestamp=2018-07-30T09:41:45.668352Z pos=preset.go:415 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="VirtualMachinePresets cannot be applied due to conflicts: presets 'test-memory-g28hd' and 'test-conflict-p6946' conflict: spec.resources.requests[memory]: {{128 6} {} 128M DecimalSI} != {{256 6} {} 256M DecimalSI}" level=warning timestamp=2018-07-30T09:41:45.668947Z pos=preset.go:157 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as failed" level=info timestamp=2018-07-30T09:41:45.669079Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir8x7z kind= uid=c5b091b7-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:41:45.970532Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizz6hg kind= uid=c5dbf36f-93dc-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:41:45.970802Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizz6hg kind= uid=c5dbf36f-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:41:46.127518Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizz6hg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmizz6hg" level=info timestamp=2018-07-30T09:41:46.188773Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizz6hg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizz6hg" level=info timestamp=2018-07-30T09:42:36.364134Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih4sbs kind= uid=e3e9148b-93dc-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:42:36.364292Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih4sbs kind= uid=e3e9148b-93dc-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:43:26.352519Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:43:26.352723Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwjjwg kind= uid=01b48dc2-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:45:41.798279Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:45:41.798416Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T09:45:56.208968Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:45:56.851711Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:45:56.853039Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind=Domain uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:45:57.137548Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:45:57.137870Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind=Domain uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:45:57.163697Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:45:57.166624Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:45:57.167642Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:45:57.181975Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:45:57.182888Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:45:57.186868Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:45:57.209743Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:45:57.210006Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:45:57.210672Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmirh2r7" level=info timestamp=2018-07-30T09:45:57.241747Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:38:02.603760Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.603828Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:38:02.624223Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.624696Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.639624Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.640340Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:38:02.642639Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.645617Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:38:38.264389Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:38:38.264703Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.265508Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvminzqq8" level=info timestamp=2018-07-30T09:38:38.312998Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.368992Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:38:38.369076Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.369267Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." • Failure [135.737 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 120.000s. Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting and stopping the VirtualMachineInstance number of times STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T09:45:41.887913Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmirh2r7 kind=VirtualMachineInstance uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmirh2r7-pm548" level=info timestamp=2018-07-30T09:45:56.188621Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmirh2r7 kind=VirtualMachineInstance uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmirh2r7-pm548" level=info timestamp=2018-07-30T09:45:57.150109Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmirh2r7 kind=VirtualMachineInstance uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:45:57.165022Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmirh2r7 kind=VirtualMachineInstance uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
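Separately from the storage timeouts, the virt-controller excerpts above also record a preset conflict: testvmir8x7z matched both test-memory-g28hd and test-conflict-p6946, which disagree on spec.resources.requests[memory] (128M vs 256M), so the controller marks the VMI as failed. The check amounts to comparing resource quantities across the matching presets; the sketch below shows that comparison with the standard resource.Quantity type and is illustrative rather than KubeVirt's actual preset.go logic.

// Sketch of the kind of preset-conflict check reported by virt-controller
// above: two presets that both set spec.resources.requests[memory] but
// disagree on the value cannot both apply. Illustrative only.
package presetcheck

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
)

// conflictingRequests returns an error when both resource lists define the
// same resource name with different quantities (e.g. 128M vs 256M memory).
func conflictingRequests(a, b corev1.ResourceList) error {
	for name, qa := range a {
		if qb, ok := b[name]; ok && qa.Cmp(qb) != 0 {
			return fmt.Errorf("spec.resources.requests[%s]: %s != %s", name, qa.String(), qb.String())
		}
	}
	return nil
}

// Example reproduces the 128M vs 256M disagreement from the log above.
func Example() {
	a := corev1.ResourceList{corev1.ResourceMemory: resource.MustParse("128M")}
	b := corev1.ResourceList{corev1.ResourceMemory: resource.MustParse("256M")}
	fmt.Println(conflictingRequests(a, b)) // spec.resources.requests[memory]: 128M != 256M
}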
• [SLOW TEST:50.878 seconds] 2018/07/30 05:48:48 read closing down: EOF Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113 should create a writeable emptyDisk with the right capacity /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115 ------------------------------ • [SLOW TEST:51.813 seconds] 2018/07/30 05:49:39 read closing down: EOF Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined and a specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163 should create a writeable emptyDisk with the specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165 ------------------------------ 2018/07/30 05:50:28 read closing down: EOF • [SLOW TEST:48.541 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should be successfully started /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:51:21 http: TLS handshake error from 10.128.0.1:45812: EOF level=info timestamp=2018-07-30T09:51:24.167934Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:51:31 http: TLS handshake error from 10.128.0.1:45862: EOF 2018/07/30 09:51:41 http: TLS handshake error from 10.128.0.1:45908: EOF 2018/07/30 09:51:51 http: TLS handshake error from 10.128.0.1:45954: EOF 2018/07/30 09:52:01 http: TLS handshake error from 10.128.0.1:46004: EOF 2018/07/30 09:52:11 http: TLS handshake error from 10.128.0.1:46050: EOF 2018/07/30 09:52:21 http: TLS handshake error from 10.128.0.1:46098: EOF level=info timestamp=2018-07-30T09:52:24.244588Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:52:31 http: TLS handshake error from 10.128.0.1:46148: EOF 2018/07/30 09:52:41 http: TLS handshake error from 10.128.0.1:46198: EOF 2018/07/30 09:52:51 http: TLS handshake error from 10.128.0.1:46244: EOF level=info timestamp=2018-07-30T09:52:54.222433Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:53:01 http: TLS handshake error from 10.128.0.1:46294: EOF 2018/07/30 09:53:11 http: TLS handshake error from 10.128.0.1:46340: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T09:52:38.704204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:52:38.705236Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 
09:52:38 http: TLS handshake error from 10.129.0.1:52416: EOF level=info timestamp=2018-07-30T09:52:41.092477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:52:43.631743Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:52:48 http: TLS handshake error from 10.129.0.1:52426: EOF level=info timestamp=2018-07-30T09:52:51.136402Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:52:58 http: TLS handshake error from 10.129.0.1:52436: EOF level=info timestamp=2018-07-30T09:53:00.239461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:53:01.194697Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:53:04.907411Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:53:08 http: TLS handshake error from 10.129.0.1:52446: EOF level=info timestamp=2018-07-30T09:53:11.252233Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:53:13.683977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:53:18 http: TLS handshake error from 10.129.0.1:52456: EOF Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:45:41.798279Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:45:41.798416Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirh2r7 kind= uid=526fd17b-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:47:57.535899Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwwqt7 kind= uid=a357b22d-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:47:57.536102Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwwqt7 kind= uid=a357b22d-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-07-30T09:47:57.601260Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwwqt7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwwqt7" level=info timestamp=2018-07-30T09:47:57.618958Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwwqt7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwwqt7" level=info timestamp=2018-07-30T09:48:48.415468Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipnsvd kind= uid=c1ab88f9-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:48:48.415634Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipnsvd kind= uid=c1ab88f9-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:48:48.502727Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipnsvd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipnsvd" level=info timestamp=2018-07-30T09:49:40.230354Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zvzs kind= uid=e08de305-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:49:40.230514Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zvzs kind= uid=e08de305-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:50:28.770268Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:50:28.770533Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:50:28.840182Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5k94g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5k94g" level=info timestamp=2018-07-30T09:50:28.853176Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5k94g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5k94g" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T09:50:44.165264Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 
msg="Processing vmi update" level=info timestamp=2018-07-30T09:50:44.861815Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:50:44.862365Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind=Domain uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:50:45.143560Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:50:45.144474Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind=Domain uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:50:45.167265Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:50:45.167789Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:50:45.169752Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:50:45.193455Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:50:45.193665Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:50:45.198484Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:51:19.530122Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:51:19.530212Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:51:19.532426Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmi5k94g" level=info timestamp=2018-07-30T09:51:19.618916Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:38:02.603760Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:38:02.603828Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:38:02.624223Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.624696Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.639624Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.640340Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:38:02.642639Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.645617Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.264389Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:38:38.264703Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.265508Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvminzqq8" level=info timestamp=2018-07-30T09:38:38.312998Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.368992Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:38:38.369076Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.369267Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 2018/07/30 05:53:19 read closing down: EOF • Failure [171.091 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should not persist data [It] /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218 Timed out after 120.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting the VirtualMachineInstance STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T09:50:28.854304Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi5k94g kind=VirtualMachineInstance uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi5k94g-xf6vn" level=info timestamp=2018-07-30T09:50:44.141668Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi5k94g kind=VirtualMachineInstance uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi5k94g-xf6vn" level=info timestamp=2018-07-30T09:50:45.167728Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi5k94g kind=VirtualMachineInstance uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:50:45.179155Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi5k94g kind=VirtualMachineInstance uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." STEP: Writing an arbitrary file to it's EFI partition STEP: Killing a VirtualMachineInstance Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:53:41 http: TLS handshake error from 10.128.0.1:46482: EOF 2018/07/30 09:53:51 http: TLS handshake error from 10.128.0.1:46528: EOF level=info timestamp=2018-07-30T09:53:54.238337Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:54:01 http: TLS handshake error from 10.128.0.1:46578: EOF 2018/07/30 09:54:11 http: TLS handshake error from 10.128.0.1:46624: EOF 2018/07/30 09:54:21 http: TLS handshake error from 10.128.0.1:46670: EOF level=info timestamp=2018-07-30T09:54:24.240548Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:54:31 http: TLS handshake error from 10.128.0.1:46720: EOF 2018/07/30 09:54:41 http: TLS handshake error from 10.128.0.1:46766: EOF 2018/07/30 09:54:51 http: TLS handshake error from 10.128.0.1:46812: EOF 2018/07/30 09:55:01 http: TLS handshake error from 10.128.0.1:46862: EOF 2018/07/30 09:55:11 http: TLS handshake error from 10.128.0.1:46908: EOF 2018/07/30 09:55:21 http: TLS handshake error from 10.128.0.1:46956: EOF 2018/07/30 09:55:31 http: TLS handshake error from 10.128.0.1:47006: EOF 2018/07/30 09:55:41 http: TLS handshake error from 10.128.0.1:47052: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T09:55:17.708190Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:55:17.722578Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:55:18 http: TLS handshake error from 10.129.0.1:52578: EOF level=info 
timestamp=2018-07-30T09:55:21.886477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:55:24.479266Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:55:28 http: TLS handshake error from 10.129.0.1:52588: EOF level=info timestamp=2018-07-30T09:55:30.517027Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:55:31.933951Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:55:35.213585Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:55:38.722142Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:55:38.723164Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 09:55:38 http: TLS handshake error from 10.129.0.1:52598: EOF level=info timestamp=2018-07-30T09:55:42.016197Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:55:43.944579Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:55:48 http: TLS handshake error from 10.129.0.1:52608: EOF Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:47:57.618958Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwwqt7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwwqt7" level=info timestamp=2018-07-30T09:48:48.415468Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipnsvd kind= uid=c1ab88f9-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:48:48.415634Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipnsvd kind= uid=c1ab88f9-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:48:48.502727Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io 
\"testvmipnsvd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipnsvd" level=info timestamp=2018-07-30T09:49:40.230354Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zvzs kind= uid=e08de305-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:49:40.230514Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zvzs kind= uid=e08de305-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:50:28.770268Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:50:28.770533Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:50:28.840182Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5k94g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5k94g" level=info timestamp=2018-07-30T09:50:28.853176Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5k94g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5k94g" level=info timestamp=2018-07-30T09:53:19.912208Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:53:19.912362Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:53:19.992135Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmildvx8" level=info timestamp=2018-07-30T09:53:20.003582Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmildvx8" level=info timestamp=2018-07-30T09:53:20.015431Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmildvx8" Pod name: virt-handler-2hh2l Pod 
phase: Running level=info timestamp=2018-07-30T09:53:49.819008Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:53:49.819395Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:53:49.833180Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:53:49.850061Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:53:49.850269Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="No update processing required" level=error timestamp=2018-07-30T09:53:49.864406Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-30T09:53:49.864500Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmildvx8" level=info timestamp=2018-07-30T09:53:49.864742Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:53:49.868046Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:53:49.869764Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:53:49.872810Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:53:49.890869Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:53:49.891289Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Processing shutdown." 
level=info timestamp=2018-07-30T09:53:49.891943Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmildvx8" level=info timestamp=2018-07-30T09:53:50.061799Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:38:02.603760Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.603828Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:38:02.624223Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.624696Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.639624Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.640340Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:38:02.642639Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.645617Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.264389Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:38:38.264703Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.265508Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvminzqq8" level=info timestamp=2018-07-30T09:38:38.312998Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.368992Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-07-30T09:38:38.369076Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.369267Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." • Failure [150.393 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With VirtualMachineInstance with two PVCs /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266 should start vmi multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278 Timed out after 120.000s. Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting and stopping the VirtualMachineInstance number of times STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T09:53:20.012021Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmildvx8 kind=VirtualMachineInstance uid=637e6173-93de-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmildvx8-2jgpt" level=info timestamp=2018-07-30T09:53:48.732918Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmildvx8 kind=VirtualMachineInstance uid=637e6173-93de-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmildvx8-2jgpt" level=info timestamp=2018-07-30T09:53:49.832635Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmildvx8 kind=VirtualMachineInstance uid=637e6173-93de-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:53:49.843729Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmildvx8 kind=VirtualMachineInstance uid=637e6173-93de-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
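Note: the Storage and RegistryDisk failures above share one shape: a 120-second poll at tests/utils.go:1065 gives up waiting for the VirtualMachineInstance to report a running state, and Gomega prints "Timed out after 120.000s. Expected : false to be true". The sketch below is a minimal, hypothetical illustration of that kind of polling assertion, meant to run inside a Ginkgo spec; it is not the actual helper in tests/utils.go, and vmiIsRunning is an assumed stand-in for however the test reads the VMI phase from the cluster.

```go
package tests_sketch

import (
	"time"

	. "github.com/onsi/gomega"
)

// waitForVMIStart is an illustrative sketch, not KubeVirt's helper.
// vmiIsRunning is a hypothetical callback that would look up the
// VirtualMachineInstance and report whether its phase is Running.
func waitForVMIStart(vmiIsRunning func() bool) {
	// Poll once per second for up to 120 seconds. When the VMI never
	// comes up, Gomega fails with a timeout message of the same shape
	// as the failures recorded in this log.
	Eventually(vmiIsRunning, 120*time.Second, 1*time.Second).Should(BeTrue())
}
```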
Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 09:56:31 http: TLS handshake error from 10.128.0.1:47290: EOF 2018/07/30 09:56:41 http: TLS handshake error from 10.128.0.1:47336: EOF 2018/07/30 09:56:51 http: TLS handshake error from 10.128.0.1:47382: EOF level=info timestamp=2018-07-30T09:56:54.241012Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:57:01 http: TLS handshake error from 10.128.0.1:47432: EOF 2018/07/30 09:57:11 http: TLS handshake error from 10.128.0.1:47478: EOF level=info timestamp=2018-07-30T09:57:21.255133Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:57:21.292613Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:57:21 http: TLS handshake error from 10.128.0.1:47532: EOF level=info timestamp=2018-07-30T09:57:24.215966Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:57:31 http: TLS handshake error from 10.128.0.1:47582: EOF 2018/07/30 09:57:41 http: TLS handshake error from 10.128.0.1:47628: EOF 2018/07/30 09:57:51 http: TLS handshake error from 10.128.0.1:47674: EOF level=info timestamp=2018-07-30T09:57:54.243788Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:58:01 http: TLS handshake error from 10.128.0.1:47724: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running 2018/07/30 09:57:28 http: TLS handshake error from 10.129.0.1:52708: EOF level=info timestamp=2018-07-30T09:57:31.039924Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:57:32.671570Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:57:35.481775Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:57:38.500014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:57:38.501000Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 09:57:38 http: TLS handshake error from 10.129.0.1:52718: EOF level=info timestamp=2018-07-30T09:57:42.710570Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:57:44.154189Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:57:48 http: TLS handshake error from 10.129.0.1:52728: EOF level=info timestamp=2018-07-30T09:57:52.763670Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:57:58 http: TLS handshake error from 10.129.0.1:52738: EOF level=info timestamp=2018-07-30T09:58:01.115668Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:58:02.818746Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:58:05.547536Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:48:48.502727Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipnsvd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipnsvd" level=info timestamp=2018-07-30T09:49:40.230354Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zvzs kind= uid=e08de305-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:49:40.230514Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zvzs kind= uid=e08de305-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:50:28.770268Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:50:28.770533Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5k94g kind= uid=fd7c5fc5-93dd-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:50:28.840182Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5k94g\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5k94g" level=info timestamp=2018-07-30T09:50:28.853176Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5k94g\": the object has been modified; please apply your changes to the latest version and try 
again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5k94g" level=info timestamp=2018-07-30T09:53:19.912208Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:53:19.912362Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmildvx8 kind= uid=637e6173-93de-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:53:19.992135Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmildvx8" level=info timestamp=2018-07-30T09:53:20.003582Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmildvx8" level=info timestamp=2018-07-30T09:53:20.015431Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmildvx8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmildvx8" level=info timestamp=2018-07-30T09:55:50.261100Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:55:50.267492Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:55:50.350738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6stpj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6stpj" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T09:56:05.612106Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:56:06.348656Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T09:56:06.349767Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind=Domain uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T09:56:06.658117Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:56:06.658922Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind=Domain uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" 
level=info timestamp=2018-07-30T09:56:06.691033Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:56:06.693127Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:56:06.693366Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:56:06.731180Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:56:06.732115Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:56:06.735833Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:56:06.747916Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:56:06.748125Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:56:06.748480Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmi6stpj" level=info timestamp=2018-07-30T09:56:06.796832Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6stpj kind= uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:38:02.603760Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.603828Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:38:02.624223Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:02.624696Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.639624Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:38:02.640340Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:38:02.642639Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:38:02.645617Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.264389Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T09:38:38.264703Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.265508Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvminzqq8" level=info timestamp=2018-07-30T09:38:38.312998Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind= uid=369bee5e-93dc-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:38:38.368992Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T09:38:38.369076Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T09:38:38.369267Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzqq8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." • Failure [136.834 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting and stopping the same VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:90 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:91 should success multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:92 Timed out after 120.000s. Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting the VirtualMachineInstance level=info timestamp=2018-07-30T09:55:50.415488Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6stpj kind=VirtualMachineInstance uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6stpj-sf6pf" level=info timestamp=2018-07-30T09:56:05.582994Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6stpj kind=VirtualMachineInstance uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi6stpj-sf6pf" level=info timestamp=2018-07-30T09:56:06.701171Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6stpj kind=VirtualMachineInstance uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." 
level=info timestamp=2018-07-30T09:56:06.712009Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6stpj kind=VirtualMachineInstance uid=bd1adab6-93de-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." STEP: Stopping the VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance is gone • [SLOW TEST:16.909 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:111 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:112 should not modify the spec on status update /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:113 ------------------------------ • [SLOW TEST:35.026 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting multiple VMIs /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:129 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:130 should success /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:131 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running level=info timestamp=2018-07-30T09:57:54.243788Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:58:01 http: TLS handshake error from 10.128.0.1:47724: EOF 2018/07/30 09:58:11 http: TLS handshake error from 10.128.0.1:47770: EOF 2018/07/30 09:58:21 http: TLS handshake error from 10.128.0.1:47818: EOF 2018/07/30 09:58:31 http: TLS handshake error from 10.128.0.1:47868: EOF 2018/07/30 09:58:41 http: TLS handshake error from 10.128.0.1:47914: EOF 2018/07/30 09:58:51 http: TLS handshake error from 10.128.0.1:47960: EOF level=info timestamp=2018-07-30T09:58:54.239158Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:59:01 http: TLS handshake error from 10.128.0.1:48010: EOF 2018/07/30 09:59:12 http: TLS handshake error from 10.128.0.1:48056: EOF 2018/07/30 09:59:21 http: TLS handshake error from 10.128.0.1:48102: EOF level=info timestamp=2018-07-30T09:59:25.220888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 09:59:31 http: TLS handshake error from 10.128.0.1:48150: EOF 2018/07/30 09:59:42 http: TLS handshake error from 10.128.0.1:48182: EOF 2018/07/30 09:59:52 http: TLS handshake error from 10.128.0.1:48216: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T09:59:06.142034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:59:09 http: TLS handshake error from 10.129.0.1:52810: EOF level=info timestamp=2018-07-30T09:59:17.208880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:59:17.221172Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:59:18 http: TLS handshake error from 10.129.0.1:52820: EOF 2018/07/30 09:59:29 http: TLS handshake error from 10.129.0.1:52830: EOF level=info timestamp=2018-07-30T09:59:33.135409Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:59:39 http: TLS handshake error from 10.129.0.1:52840: EOF 2018/07/30 09:59:49 http: TLS handshake error from 10.129.0.1:52850: EOF level=info timestamp=2018-07-30T09:59:58.560813Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" level=info timestamp=2018-07-30T09:59:58.847781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 09:59:58 http: TLS handshake error from 10.129.0.1:52870: EOF level=info timestamp=2018-07-30T09:59:59.224808Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T09:59:59.323240Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T09:59:59.323731Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T09:58:24.083301Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminjwnq kind= uid=18c5ebbb-93df-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:58:24.096087Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8t4kv kind= uid=18cd014c-93df-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:58:24.096167Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8t4kv kind= uid=18cd014c-93df-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:58:24.169858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik7ctd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik7ctd" level=info timestamp=2018-07-30T09:59:00.437860Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:59:00.438478Z pos=preset.go:171 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:59:00.443888Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7wm6 kind= uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:59:00.443930Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7wm6 kind= uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:59:00.444179Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz7jtl kind= uid=2e57b139-93df-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:59:00.444214Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz7jtl kind= uid=2e57b139-93df-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:59:00.474611Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T09:59:00.474723Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T09:59:00.584714Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv7wm6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv7wm6" level=info timestamp=2018-07-30T09:59:00.585247Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz7jtl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz7jtl" level=info timestamp=2018-07-30T09:59:01.450048Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitjhnx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitjhnx" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T09:59:20.615897Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmiv7wm6 kind=Domain uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:59:20.937024Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiv7wm6 kind= uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T09:59:20.991174Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiv7wm6 kind= uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:59:20.991052Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T09:59:22.841266Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:59:22.896455Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T09:59:25.083834Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiv7wm6 kind= uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:59:25.083993Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiv7wm6 kind= uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:59:25.741179Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiv7wm6 kind= uid=2e39c2ad-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=error timestamp=2018-07-30T09:59:25.907559Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibhn4g\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-30T09:59:26.085433Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibhn4g\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibhn4g" level=info timestamp=2018-07-30T09:59:26.471484Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:59:26.808600Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:59:26.987830Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:59:27.165626Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibhn4g kind= uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T09:59:22.426166Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind=Domain uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T09:59:22.534868Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:59:22.539885Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="No update processing required" level=error timestamp=2018-07-30T09:59:22.542242Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmiz7jtl kind= uid=2e57b139-93df-11e8-82c1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz7jtl\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-30T09:59:22.544543Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz7jtl\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiz7jtl" level=info timestamp=2018-07-30T09:59:22.544907Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiz7jtl kind= uid=2e57b139-93df-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:59:22.549243Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiz7jtl kind= uid=2e57b139-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:59:22.551639Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiz7jtl kind= uid=2e57b139-93df-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:59:22.555189Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiz7jtl kind= uid=2e57b139-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=error timestamp=2018-07-30T09:59:24.853013Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitjhnx\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." 
level=info timestamp=2018-07-30T09:59:24.853093Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitjhnx\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmitjhnx" level=info timestamp=2018-07-30T09:59:24.853197Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:59:25.052733Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T09:59:25.161536Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T09:59:25.199934Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitjhnx kind= uid=2e7313c2-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmibhn4g-lmqnv Pod phase: Running Pod name: virt-launcher-testvmitjhnx-qh958 Pod phase: Running Pod name: virt-launcher-testvmiv7wm6-qczrx Pod phase: Running Pod name: virt-launcher-testvmiz7jtl-r5wj8 Pod phase: Running • Failure in Spec Setup (BeforeEach) [62.623 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.errorString | 0xc420890780>: { s: "Timeout trying to connect to the virtual machine instance", } Timeout trying to connect to the virtual machine instance not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:94 ------------------------------ level=info timestamp=2018-07-30T09:59:01.395071Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmibhn4g kind=VirtualMachineInstance uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmibhn4g-lmqnv" level=info timestamp=2018-07-30T09:59:18.841424Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmibhn4g kind=VirtualMachineInstance uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmibhn4g-lmqnv" level=info timestamp=2018-07-30T09:59:22.727072Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmibhn4g kind=VirtualMachineInstance uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T09:59:25.725799Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmibhn4g kind=VirtualMachineInstance uid=2e18c1a1-93df-11e8-82c1-525500d15501 msg="VirtualMachineInstance started." 
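Note: the virt-controller and virt-handler logs above are dominated by "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ...: the object has been modified; please apply your changes to the latest version and try again" followed by a re-enqueue. That message is the standard Kubernetes optimistic-concurrency conflict (HTTP 409) returned when an update carries a stale resourceVersion; the components recover by re-enqueuing the object and reprocessing it against a fresh copy, which is why most of these lines are informational rather than fatal. For illustration only, a generic client-go way to absorb the same conflict inline is RetryOnConflict, sketched below; this is not KubeVirt's controller code, and fetchLatest/mutateAndUpdate are assumed placeholders for a Get and Update of the VirtualMachineInstance.

```go
package conflictretry

import (
	"k8s.io/client-go/util/retry"
)

// updateWithConflictRetry is an illustrative sketch of handling the
// "object has been modified" conflict: re-read the latest object and
// re-apply the change until the update lands or the retry budget runs out.
// fetchLatest and mutateAndUpdate are hypothetical placeholders.
func updateWithConflictRetry(fetchLatest func() error, mutateAndUpdate func() error) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		if err := fetchLatest(); err != nil {
			return err
		}
		// If the stored resourceVersion has moved on, the update comes
		// back as a Conflict error and RetryOnConflict loops again.
		return mutateAndUpdate()
	})
}
```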
2018/07/30 06:01:21 read closing down: EOF 2018/07/30 06:01:31 read closing down: EOF 2018/07/30 06:01:41 read closing down: EOF 2018/07/30 06:01:51 read closing down: EOF 2018/07/30 06:01:52 read closing down: EOF 2018/07/30 06:01:53 read closing down: EOF 2018/07/30 06:01:54 read closing down: EOF 2018/07/30 06:01:54 read closing down: EOF • [SLOW TEST:113.204 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with pod network connectivity explicitly set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] [0.010 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with custom MAC address [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Openshift detected: Custom MAC addresses on pod networks are not suppored /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1535 ------------------------------ 2018/07/30 06:01:55 read closing down: EOF •2018/07/30 06:01:56 read closing down: EOF 2018/07/30 06:01:56 read closing down: EOF • ------------------------------ • [SLOW TEST:5.096 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be reachable via the propagated IP from a Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 on a different node from Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:5.303 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a service matching the vmi exposed /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:287 should be able to reach the vmi based on labels specified on the vmi /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:307 ------------------------------ • [SLOW TEST:5.361 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a service matching the vmi exposed /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:287 should fail to reach the vmi if an invalid servicename is used /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:318 ------------------------------ •2018/07/30 06:03:20 read closing down: EOF 2018/07/30 06:03:21 read closing down: EOF ------------------------------ • [SLOW TEST:56.724 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom interface model /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:383 should expose the right device type to the guest /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:384 ------------------------------ 2018/07/30 06:03:21 read closing down: EOF 2018/07/30 06:03:22 read closing down: EOF •2018/07/30 06:04:16 read closing down: EOF ------------------------------ • [SLOW TEST:54.618 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:417 
should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:418 ------------------------------ 2018/07/30 06:04:16 read closing down: EOF 2018/07/30 06:05:18 read closing down: EOF 2018/07/30 06:05:19 read closing down: EOF • [SLOW TEST:62.466 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address in non-conventional format /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:429 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:430 ------------------------------ 2018/07/30 06:06:20 read closing down: EOF • [SLOW TEST:61.399 seconds] 2018/07/30 06:06:20 read closing down: EOF Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address and slirp interface /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:442 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:443 ------------------------------ 2018/07/30 06:07:17 read closing down: EOF 2018/07/30 06:07:17 read closing down: EOF • [SLOW TEST:57.112 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with disabled automatic attachment of interfaces /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:455 should not configure any external interfaces /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:456 ------------------------------ volumedisk0 compute 2018/07/30 06:08:15 read closing down: EOF • [SLOW TEST:57.572 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with 3 CPU cores /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56 should report 3 cpu cores under guest OS /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 10:07:19 http: response.WriteHeader on hijacked connection level=error timestamp=2018-07-30T10:07:19.655329Z pos=subresource.go:97 component=virt-api reason="read tcp 10.128.0.23:8443->10.128.0.1:35594: use of closed network connection" msg="error ecountered reading from websocket stream" level=info timestamp=2018-07-30T10:07:19.655466Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmivjsk9/console proto=HTTP/1.1 statusCode=200 contentLength=0 level=info timestamp=2018-07-30T10:07:21.583084Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:07:21 http: TLS handshake error from 10.128.0.1:50694: EOF 2018/07/30 10:07:31 http: TLS handshake error from 10.128.0.1:50832: EOF level=info timestamp=2018-07-30T10:07:38.577024Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 10:07:41 http: TLS handshake error from 10.128.0.1:50886: EOF 2018/07/30 10:07:51 http: TLS handshake error from 10.128.0.1:50932: EOF 2018/07/30 
10:08:01 http: TLS handshake error from 10.128.0.1:50982: EOF 2018/07/30 10:08:11 http: TLS handshake error from 10.128.0.1:51028: EOF level=error timestamp=2018-07-30T10:08:15.203077Z pos=subresource.go:85 component=virt-api msg="connection failed: command terminated with exit code 137" 2018/07/30 10:08:15 http: response.WriteHeader on hijacked connection level=error timestamp=2018-07-30T10:08:15.203283Z pos=subresource.go:97 component=virt-api reason="read tcp 10.128.0.23:8443->10.128.0.1:37000: use of closed network connection" msg="error ecountered reading from websocket stream" level=info timestamp=2018-07-30T10:08:15.203355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmib7gxp/console proto=HTTP/1.1 statusCode=500 contentLength=0 Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T10:07:35.304160Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:07:38 http: TLS handshake error from 10.129.0.1:53364: EOF level=info timestamp=2018-07-30T10:07:42.496046Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T10:07:42.498546Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T10:07:44.386942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:07:48 http: TLS handshake error from 10.129.0.1:53374: EOF level=info timestamp=2018-07-30T10:07:53.981981Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:07:54.451734Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:07:55.508743Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:07:58.449812Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:07:58 http: TLS handshake error from 10.129.0.1:53384: EOF level=info timestamp=2018-07-30T10:08:04.504447Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:08:05.366110Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:08:08 http: TLS handshake error from 10.129.0.1:53394: EOF level=info timestamp=2018-07-30T10:08:14.553596Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T09:14:13.494977Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-rwnst Pod phase: Running level=info timestamp=2018-07-30T10:04:16.764794Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8sm kind= uid=eb01ecf9-93df-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T10:04:16.853120Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7g8sm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7g8sm" level=info timestamp=2018-07-30T10:04:16.882987Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7g8sm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7g8sm" level=info timestamp=2018-07-30T10:05:19.236417Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwznx kind= uid=103d8b9e-93e0-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T10:05:19.236551Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwznx kind= uid=103d8b9e-93e0-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T10:05:19.376873Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwznx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwznx" level=info timestamp=2018-07-30T10:05:19.390279Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwznx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwznx" level=info timestamp=2018-07-30T10:06:20.630827Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4xvh2 kind= uid=34d6596a-93e0-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T10:06:20.631007Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4xvh2 kind= uid=34d6596a-93e0-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T10:06:20.759504Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvh2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvh2" level=info 
timestamp=2018-07-30T10:06:20.786170Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4xvh2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4xvh2" level=info timestamp=2018-07-30T10:07:19.602618Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T10:07:19.602803Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T10:08:15.538233Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizr4xf kind= uid=79542c01-93e0-11e8-82c1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T10:08:15.538418Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizr4xf kind= uid=79542c01-93e0-11e8-82c1-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T10:07:38.601408Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:07:38.622968Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:07:38.624716Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:07:38.625645Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T10:07:38.626570Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind=Domain uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T10:07:38.638849Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:07:38.639163Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:07:38.645425Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:08:15.339055Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:08:15.339175Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Processing shutdown." 
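The virt-controller entries above that read "Operation cannot be fulfilled ... the object has been modified; please apply your changes to the latest version and try again" are ordinary optimistic-concurrency conflicts: an update was attempted with a stale resourceVersion, and the controller simply reenqueues the VirtualMachineInstance instead of failing the sync. A minimal, hedged way to watch one of these objects converge from outside the suite, assuming cluster access and the kubevirt-test-default namespace used in this run (testvmib7gxp is taken from the surrounding handler entries):

    kubectl get vmi testvmib7gxp -n kubevirt-test-default -w \
      -o custom-columns=NAME:.metadata.name,PHASE:.status.phase,RV:.metadata.resourceVersion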
level=info timestamp=2018-07-30T10:08:15.339665Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmib7gxp" level=info timestamp=2018-07-30T10:08:15.461255Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind= uid=57fd24f2-93e0-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:08:15.471700Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:08:15.471783Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:08:15.472026Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmib7gxp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T10:07:17.799776Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvh2 kind= uid=34d6596a-93e0-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:07:17.879302Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmi7kjcs kind= uid=5408167f-93df-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:07:17.879391Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi7kjcs kind= uid=5408167f-93df-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:07:17.890805Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmi7kjcs kind= uid=5408167f-93df-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmi7kjcs" level=info timestamp=2018-07-30T10:07:18.146680Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvh2 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:07:18.146767Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvh2 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:07:18.147055Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi4xvh2 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:07:18.387077Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7g8sm kind= uid=eb01ecf9-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:07:18.387201Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi7g8sm kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:07:18.387229Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi7g8sm kind=VirtualMachineInstance uid= msg="Processing shutdown." 
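The "grace period expired, killing deleted VirtualMachineInstance" entries above are the handler's forced-teardown path: once a deleted VMI's termination grace period elapses, the domain is killed rather than shut down cleanly (hence the virt-launcher "dirty virt-launcher shutdown" error). The grace period being honoured comes from the instance's own spec; a sketch for reading it back, reusing the name and namespace from the entries above (field name per the KubeVirt v1alpha2 API):

    kubectl get vmi testvmi7kjcs -n kubevirt-test-default \
      -o jsonpath='{.spec.terminationGracePeriodSeconds}{"\n"}'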
level=info timestamp=2018-07-30T10:07:18.387386Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7g8sm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:07:18.454035Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7kjcs kind= uid=5408167f-93df-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:07:18.457358Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi7kjcs kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:07:18.459223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi7kjcs kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:07:18.459582Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi7kjcs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmib7gxp-2jtcs Pod phase: Running level=error timestamp=2018-07-30T10:08:15.461767Z pos=libvirt_helper.go:282 component=virt-launcher reason="signal: killed" msg="dirty virt-launcher shutdown" • Failure [1.465 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with namespace memory limits /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:107 should failed to schedule the pod, copy limits to vm spec [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:109 Expected : true not to be true /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:157 ------------------------------ STEP: Starting a VirtualMachineInstance • [SLOW TEST:16.843 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-2Mi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] [0.216 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-1Gi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-1Gi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:216 ------------------------------ • ------------------------------ • [SLOW TEST:119.105 seconds] Configurations 2018/07/30 06:10:33 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:340 should report defined CPU model 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:341 ------------------------------ • [SLOW TEST:137.829 seconds] 2018/07/30 06:12:51 read closing down: EOF Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model equals to passthrough /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:368 should report exactly the same model as node CPU /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:369 ------------------------------ 2018/07/30 06:14:55 read closing down: EOF • [SLOW TEST:123.235 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model not defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:392 should report CPU model from libvirt capabilities /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:393 ------------------------------ • [SLOW TEST:52.806 seconds] 2018/07/30 06:15:47 read closing down: EOF Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 New VirtualMachineInstance with all supported drives /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:413 should have all the device nodes /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:436 ------------------------------ 2018/07/30 06:16:38 read closing down: EOF Service cluster-ip-vmi successfully exposed for virtualmachineinstance testvmihd55c • [SLOW TEST:53.739 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:68 Should expose a Cluster IP service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:71 ------------------------------ Service cluster-ip-target-vmi successfully exposed for virtualmachineinstance testvmihd55c •Service node-port-vmi successfully exposed for virtualmachineinstance testvmihd55c ------------------------------ • [SLOW TEST:10.202 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose NodePort service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:124 Should expose a NodePort service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:129 ------------------------------ 2018/07/30 06:17:44 read closing down: EOF Service cluster-ip-udp-vmi successfully exposed for virtualmachineinstance testvmitlpz9 • [SLOW TEST:56.102 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VMI /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166 Expose ClusterIP UDP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:173 Should expose a ClusterIP service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:177 ------------------------------ Service node-port-udp-vmi successfully exposed for virtualmachineinstance testvmitlpz9 • [SLOW TEST:9.283 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VMI /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166 Expose NodePort UDP service 
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:205 Should expose a NodePort service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:210 ------------------------------ 2018/07/30 06:18:48 read closing down: EOF 2018/07/30 06:18:59 read closing down: EOF Service cluster-ip-vmirs successfully exposed for vmirs replicaset55c4h • [SLOW TEST:66.322 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VMI replica set /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:253 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:286 Should create a ClusterIP service on VMRS and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:290 ------------------------------ Service cluster-ip-vm successfully exposed for virtualmachine testvmic85j8 VM testvmic85j8 was scheduled to start 2018/07/30 06:20:01 read closing down: EOF • [SLOW TEST:61.700 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on an VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:318 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:362 Connect to ClusterIP services that was set when VM was offline /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:363 ------------------------------ • [SLOW TEST:38.829 seconds] LeaderElection /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43 Start a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53 when the controller pod is not running /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54 should success /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.004 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to start a vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1349 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.004 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to stop a running vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1349 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.004 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have correct UUID /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1349 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.004 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have pod IP /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1349 ------------------------------ S [SKIPPING] in Spec Setup 
(BeforeEach) [0.003 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to start a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1349 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.003 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to stop a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1349 ------------------------------ •• ------------------------------ • [SLOW TEST:17.161 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should update VirtualMachine once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195 ------------------------------ •• Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 10:23:41 http: TLS handshake error from 10.128.0.1:55820: EOF level=info timestamp=2018-07-30T10:23:50.013749Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:23:51 http: TLS handshake error from 10.128.0.1:55874: EOF 2018/07/30 10:24:01 http: TLS handshake error from 10.128.0.1:55924: EOF 2018/07/30 10:24:11 http: TLS handshake error from 10.128.0.1:55974: EOF level=info timestamp=2018-07-30T10:24:20.019014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:24:21 http: TLS handshake error from 10.128.0.1:56028: EOF 2018/07/30 10:24:31 http: TLS handshake error from 10.128.0.1:56078: EOF 2018/07/30 10:24:41 http: TLS handshake error from 10.128.0.1:56128: EOF level=info timestamp=2018-07-30T10:24:50.048741Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:24:51 http: TLS handshake error from 10.128.0.1:56182: EOF 2018/07/30 10:25:01 http: TLS handshake error from 10.128.0.1:56232: EOF 2018/07/30 10:25:11 http: TLS handshake error from 10.128.0.1:56282: EOF level=info timestamp=2018-07-30T10:25:20.002102Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:25:21 http: TLS handshake error from 10.128.0.1:56336: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running 2018/07/30 10:24:28 http: TLS handshake error from 10.129.0.1:54308: EOF level=info timestamp=2018-07-30T10:24:31.722594Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:24:38 http: TLS handshake error from 10.129.0.1:54314: EOF level=info timestamp=2018-07-30T10:24:41.778377Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:24:48 http: TLS handshake error from 10.129.0.1:54320: EOF level=info timestamp=2018-07-30T10:24:51.829232Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:24:56.614569Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:24:56.667574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:24:57.010308Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:24:58 http: TLS handshake error from 10.129.0.1:54326: EOF level=info timestamp=2018-07-30T10:25:01.877426Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:25:08 http: TLS handshake error from 10.129.0.1:54332: EOF level=info timestamp=2018-07-30T10:25:11.928630Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:25:18 http: TLS handshake error from 10.129.0.1:54338: EOF level=info timestamp=2018-07-30T10:25:21.975383Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-cjm2z Pod phase: Running level=info timestamp=2018-07-30T10:20:14.232830Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T10:21:06.202862Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:21:06.204385Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:21:06.204430Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:21:06.235943Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:21:06.236036Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= 
uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:21:06.253058Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:21:06.253128Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:21:22.107708Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:21:22.107804Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:21:23.270789Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:21:23.270876Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:21:23.286011Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:21:23.286117Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:21:24.309536Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:21:24.310612Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44a86cc2-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T10:21:22.109809Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:21:22.876108Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T10:21:22.878505Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind=Domain uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T10:21:23.204136Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:21:23.207784Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind=Domain uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T10:21:23.236135Z pos=server.go:75 
component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:21:23.239814Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:21:23.240038Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T10:21:23.264252Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:21:23.264621Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:21:23.270358Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:21:24.297048Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:21:24.297376Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:21:24.298759Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmiblqd9" level=info timestamp=2018-07-30T10:21:24.349304Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiblqd9 kind= uid=44ab6698-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T10:20:05.501688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind= uid=d416248f-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:05.528033Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind= uid=fbb6f9b8-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:06.254101Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:20:06.256464Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Processing shutdown." 
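The failure reported just below ("should recreate VirtualMachineInstance if it gets deleted", timed out after 240 s) means no replacement VMI was observed within the timeout. The same behaviour can be probed by hand by deleting the VMI behind a running VirtualMachine and watching for its successor; a sketch, assuming the namespace from this run and reusing testvmiblqd9 from the surrounding entries (the recreated instance carries the VirtualMachine's name):

    kubectl delete vmi testvmiblqd9 -n kubevirt-test-default
    # If recreation works, a fresh VMI with the same name should reappear shortly:
    kubectl get vmi testvmiblqd9 -n kubevirt-test-default -w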
level=info timestamp=2018-07-30T10:20:06.261032Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmitlpz9" level=info timestamp=2018-07-30T10:20:06.686050Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.501467Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.501606Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.502510Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.575142Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.575223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.575368Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.617574Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.617685Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.617933Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." ------------------------------ • Failure [258.497 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if it gets deleted [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245 Timed out after 240.000s. 
Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:262 ------------------------------ STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition • [SLOW TEST:46.508 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265 ------------------------------ Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running level=info timestamp=2018-07-30T10:29:19.978961Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:29:21 http: TLS handshake error from 10.128.0.1:57584: EOF 2018/07/30 10:29:31 http: TLS handshake error from 10.128.0.1:57634: EOF 2018/07/30 10:29:41 http: TLS handshake error from 10.128.0.1:57684: EOF 2018/07/30 10:29:51 http: TLS handshake error from 10.128.0.1:57738: EOF 2018/07/30 10:30:01 http: TLS handshake error from 10.128.0.1:57788: EOF 2018/07/30 10:30:11 http: TLS handshake error from 10.128.0.1:57838: EOF 2018/07/30 10:30:21 http: TLS handshake error from 10.128.0.1:57892: EOF 2018/07/30 10:30:31 http: TLS handshake error from 10.128.0.1:57942: EOF 2018/07/30 10:30:41 http: TLS handshake error from 10.128.0.1:57998: EOF 2018/07/30 10:30:51 http: TLS handshake error from 10.128.0.1:58052: EOF 2018/07/30 10:31:01 http: TLS handshake error from 10.128.0.1:58102: EOF 2018/07/30 10:31:11 http: TLS handshake error from 10.128.0.1:58152: EOF level=info timestamp=2018-07-30T10:31:20.051065Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:31:21 http: TLS handshake error from 10.128.0.1:58206: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T10:30:53.650391Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:30:57.239783Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:30:57.380753Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:30:57.683414Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:30:58 http: TLS handshake error from 10.129.0.1:54546: EOF level=info timestamp=2018-07-30T10:31:03.697964Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:31:04.026446Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- 
method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T10:31:04.027822Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/30 10:31:08 http: TLS handshake error from 10.129.0.1:54552: EOF level=info timestamp=2018-07-30T10:31:13.750443Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:31:18 http: TLS handshake error from 10.129.0.1:54558: EOF level=info timestamp=2018-07-30T10:31:23.805854Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:31:27.289644Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:31:27.423517Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:31:27.770984Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-cjm2z Pod phase: Running level=info timestamp=2018-07-30T10:20:14.232830Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T10:26:11.239623Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigjlm6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigjlm6" level=info timestamp=2018-07-30T10:26:11.254237Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:26:11.254300Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:26:11.256865Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigjlm6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigjlm6" level=info timestamp=2018-07-30T10:26:26.823961Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:26:26.824085Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Creating or the 
VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:26:27.943017Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:26:27.951995Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:26:27.968204Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:26:27.968450Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:26:28.279596Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:26:28.280025Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T10:26:28.294576Z pos=vm.go:322 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Dispatching delete event" level=info timestamp=2018-07-30T10:26:28.300582Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:26:28.300680Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa73b5d7-93e2-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: false" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T10:26:27.895733Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:26:27.896013Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T10:26:27.920381Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:26:27.938298Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T10:26:27.938731Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="No update processing required" level=error timestamp=2018-07-30T10:26:27.945413Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigjlm6\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-30T10:26:27.945608Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigjlm6\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigjlm6" level=info timestamp=2018-07-30T10:26:27.945837Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:26:27.949564Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:26:27.951447Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:26:27.955755Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:26:28.295896Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:26:28.295981Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:26:28.296426Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmigjlm6" level=info timestamp=2018-07-30T10:26:28.339712Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigjlm6 kind= uid=fa769854-93e2-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T10:20:05.501688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind= uid=d416248f-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:05.528033Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind= uid=fbb6f9b8-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
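The failure reported just below ("should stop VirtualMachineInstance if running set to false", timed out after 300 s) is about the backing VMI never disappearing after the VirtualMachine was told to stop. A hedged sketch of the equivalent manual steps, reusing testvmigjlm6 from the surrounding entries and the v1alpha2 spec.running field via a merge patch:

    kubectl patch vm testvmigjlm6 -n kubevirt-test-default --type=merge -p '{"spec":{"running":false}}'
    # Poll until the backing VMI is gone -- the condition the test waits 300 s for:
    while kubectl get vmi testvmigjlm6 -n kubevirt-test-default >/dev/null 2>&1; do sleep 5; done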
level=info timestamp=2018-07-30T10:20:06.254101Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:20:06.256464Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:06.261032Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmitlpz9" level=info timestamp=2018-07-30T10:20:06.686050Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.501467Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.501606Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.502510Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.575142Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.575223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.575368Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.617574Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.617685Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.617933Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." • Failure [317.464 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should stop VirtualMachineInstance if running set to false [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325 Timed out after 300.000s. 
The vmi did not disappear Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:183 ------------------------------ STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition STEP: Stopping the VirtualMachineInstance Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running 2018/07/30 10:34:51 http: TLS handshake error from 10.128.0.1:59286: EOF 2018/07/30 10:35:01 http: TLS handshake error from 10.128.0.1:59338: EOF 2018/07/30 10:35:11 http: TLS handshake error from 10.128.0.1:59388: EOF level=info timestamp=2018-07-30T10:35:20.028775Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:35:21 http: TLS handshake error from 10.128.0.1:59442: EOF 2018/07/30 10:35:31 http: TLS handshake error from 10.128.0.1:59492: EOF 2018/07/30 10:35:41 http: TLS handshake error from 10.128.0.1:59542: EOF level=info timestamp=2018-07-30T10:35:49.996970Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:35:51 http: TLS handshake error from 10.128.0.1:59596: EOF 2018/07/30 10:36:01 http: TLS handshake error from 10.128.0.1:59646: EOF 2018/07/30 10:36:11 http: TLS handshake error from 10.128.0.1:59696: EOF level=info timestamp=2018-07-30T10:36:20.033711Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:36:21 http: TLS handshake error from 10.128.0.1:59750: EOF 2018/07/30 10:36:31 http: TLS handshake error from 10.128.0.1:59800: EOF 2018/07/30 10:36:41 http: TLS handshake error from 10.128.0.1:59850: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running 2018/07/30 10:35:58 http: TLS handshake error from 10.129.0.1:54730: EOF level=info timestamp=2018-07-30T10:36:04.285116Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T10:36:04.286464Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T10:36:05.149913Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:36:08 http: TLS handshake error from 10.129.0.1:54736: EOF level=info timestamp=2018-07-30T10:36:15.195276Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:36:18 http: TLS handshake error from 10.129.0.1:54742: EOF level=info timestamp=2018-07-30T10:36:25.236180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:36:27.999923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 
contentLength=136 level=info timestamp=2018-07-30T10:36:28.096708Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:36:28.457860Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:36:28 http: TLS handshake error from 10.129.0.1:54748: EOF level=info timestamp=2018-07-30T10:36:35.283020Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:36:38 http: TLS handshake error from 10.129.0.1:54754: EOF level=info timestamp=2018-07-30T10:36:45.340680Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-cjm2z Pod phase: Running level=info timestamp=2018-07-30T10:20:14.232830Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T10:31:28.738812Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:31:28.738926Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:31:28.748035Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:31:28.748139Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:31:44.725182Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:31:44.725255Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:31:45.814715Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:31:45.814827Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:31:45.825277Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Started processing VM" level=info 
timestamp=2018-07-30T10:31:45.825504Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:31:46.749844Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:31:46.755403Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T10:31:46.764976Z pos=vm.go:322 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Dispatching delete event" level=info timestamp=2018-07-30T10:31:46.766283Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:31:46.766439Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7acc872-93e3-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: false" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T10:31:44.726641Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:31:45.461192Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-30T10:31:45.461404Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind=Domain uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Domain is in state Paused reason StartingUp" level=info timestamp=2018-07-30T10:31:45.758013Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:31:45.762965Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind=Domain uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T10:31:45.786638Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:31:45.788301Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:31:45.788444Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T10:31:45.816134Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T10:31:45.817087Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:31:45.821233Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:31:46.768008Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:31:46.768382Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:31:46.769008Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmil5mnn" level=info timestamp=2018-07-30T10:31:46.794635Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmil5mnn kind= uid=b7b1f499-93e3-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T10:20:05.501688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind= uid=d416248f-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:05.528033Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind= uid=fbb6f9b8-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:06.254101Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:20:06.256464Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:06.261032Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmitlpz9" level=info timestamp=2018-07-30T10:20:06.686050Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.501467Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.501606Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.502510Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T10:20:07.575142Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.575223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.575368Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.617574Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.617685Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.617933Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." • Failure [318.475 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should start and stop VirtualMachineInstance multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333 Timed out after 300.000s. The vmi did not disappear Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:183 ------------------------------ STEP: Doing run: 0 STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition STEP: Stopping the VirtualMachineInstance Pod name: disks-images-provider-g84sr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-jj4df Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-grzfd Pod phase: Running level=info timestamp=2018-07-30T10:39:49.998885Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 10:39:51 http: TLS handshake error from 10.128.0.1:60838: EOF 2018/07/30 10:40:01 http: TLS handshake error from 10.128.0.1:60888: EOF 2018/07/30 10:40:11 http: TLS handshake error from 10.128.0.1:60938: EOF 2018/07/30 10:40:21 http: TLS handshake error from 10.128.0.1:60992: EOF 2018/07/30 10:40:31 http: TLS handshake error from 10.128.0.1:32810: EOF 2018/07/30 10:40:41 http: TLS handshake error from 10.128.0.1:32866: EOF 2018/07/30 10:40:51 http: TLS handshake error from 10.128.0.1:32922: EOF 2018/07/30 10:41:01 http: TLS handshake error from 10.128.0.1:32974: EOF 2018/07/30 10:41:11 http: TLS handshake error from 10.128.0.1:33024: EOF 2018/07/30 10:41:21 http: TLS handshake error from 10.128.0.1:33078: EOF 2018/07/30 10:41:31 http: TLS handshake error from 10.128.0.1:33128: EOF 2018/07/30 10:41:41 http: TLS handshake error from 10.128.0.1:33178: EOF 2018/07/30 10:41:51 http: TLS handshake error from 10.128.0.1:33232: EOF 2018/07/30 10:42:01 http: TLS handshake error from 10.128.0.1:33282: EOF Pod name: virt-api-7d79764579-m89z6 Pod phase: Running level=info timestamp=2018-07-30T10:41:26.753720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:41:28.543000Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:41:28.693441Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:41:28 http: TLS handshake error from 10.129.0.1:54932: EOF level=info timestamp=2018-07-30T10:41:29.220964Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:41:36.795053Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:41:38 http: TLS handshake error from 10.129.0.1:54938: EOF level=info timestamp=2018-07-30T10:41:46.843722Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:41:48 http: TLS handshake error from 10.129.0.1:54944: EOF level=info timestamp=2018-07-30T10:41:50.283023Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T10:41:56.895000Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:41:58.602124Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T10:41:58.746734Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 10:41:58 http: TLS handshake error from 10.129.0.1:54950: EOF level=info timestamp=2018-07-30T10:41:59.274856Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-cjm2z Pod phase: Running level=info timestamp=2018-07-30T10:20:14.232830Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-mvwrb Pod phase: Running level=info timestamp=2018-07-30T10:36:47.170680Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:36:47.170794Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:37:03.160961Z pos=vm.go:135 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:37:03.161041Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:37:04.263710Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:37:04.264095Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:37:04.278588Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:37:04.278987Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:37:05.199873Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:37:05.206410Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T10:37:05.214482Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:37:05.214620Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T10:37:05.226410Z pos=vm.go:322 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Dispatching delete event" level=info timestamp=2018-07-30T10:37:05.233442Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T10:37:05.242900Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=757fe0ff-93e4-11e8-82c1-525500d15501 msg="Creating or the VirtualMachineInstance: false" Pod name: virt-handler-2hh2l Pod phase: Running level=info timestamp=2018-07-30T10:37:04.239992Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T10:37:04.240361Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T10:37:04.242797Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T10:37:04.259262Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:37:04.259779Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="No update processing required" level=error timestamp=2018-07-30T10:37:04.269835Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5cbz8\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-30T10:37:04.270123Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5cbz8\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5cbz8" level=info timestamp=2018-07-30T10:37:04.270381Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:37:04.273848Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:37:04.275931Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T10:37:04.282085Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:37:05.225359Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:37:05.225953Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:37:05.227289Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmi5cbz8" level=info timestamp=2018-07-30T10:37:05.264688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5cbz8 kind= uid=75811b2e-93e4-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." 
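The level=error entry above ("Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5cbz8\": the object has been modified; please apply your changes to the latest version and try again") is the standard Kubernetes optimistic-concurrency conflict: virt-handler tried to write the VMI status with a stale resourceVersion, and the following "re-enqueuing VirtualMachineInstance" line shows it simply retries on the next loop iteration, so this error on its own is benign. As a rough illustration only (not KubeVirt's actual virt-handler code; the kubecli client interface and the KubeVirt import paths are assumed from the tree of this era), the usual client-go way to resolve such a conflict is a fetch-mutate-update retry:

package illustration

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/util/retry"

	v1 "kubevirt.io/kubevirt/pkg/api/v1" // assumed import path for the VMI types
	"kubevirt.io/kubevirt/pkg/kubecli"   // assumed import path for the client
)

// updateVMIStatus re-reads the latest VMI and re-applies the status change
// whenever the apiserver rejects the update with a resourceVersion conflict.
func updateVMIStatus(client kubecli.KubevirtClient, namespace, name string,
	mutate func(vmi *v1.VirtualMachineInstance)) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		vmi, err := client.VirtualMachineInstance(namespace).Get(name, &metav1.GetOptions{})
		if err != nil {
			return err
		}
		mutate(vmi) // apply the desired status change to the fresh copy
		_, err = client.VirtualMachineInstance(namespace).Update(vmi)
		return err
	})
}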
Pod name: virt-handler-ztw2p Pod phase: Running level=info timestamp=2018-07-30T10:20:05.501688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind= uid=d416248f-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:05.528033Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind= uid=fbb6f9b8-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:06.254101Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T10:20:06.256464Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:06.261032Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmitlpz9" level=info timestamp=2018-07-30T10:20:06.686050Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind= uid=ad1b34ee-93e1-11e8-82c1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.501467Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.501606Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.502510Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi455jjsw8h5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.575142Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.575223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.575368Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmic85j8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T10:20:07.617574Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-30T10:20:07.617685Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-30T10:20:07.617933Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitlpz9 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
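Each of the failures in this run reports the same assertion at /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:183: "Timed out after 300.000s. The vmi did not disappear. Expected false to be true." That wording is what Gomega prints when an Eventually poll on a boolean never succeeds within its timeout, i.e. the test kept checking for five minutes whether the VirtualMachineInstance had been removed and it never was. A minimal sketch of that kind of assertion (using the Ginkgo/Gomega and apimachinery packages already vendored in this tree; virtClient and vmi are suite-level variables assumed here, and this is not the literal vm_test.go source):

// Poll once per second, for at most five minutes, until the VMI is gone.
// Assumed imports: . "github.com/onsi/gomega", "time",
// "k8s.io/apimachinery/pkg/api/errors" and metav1.
Eventually(func() bool {
	_, err := virtClient.VirtualMachineInstance(vmi.Namespace).Get(vmi.Name, &metav1.GetOptions{})
	return errors.IsNotFound(err)
}, 300*time.Second, 1*time.Second).Should(BeTrue(), "The vmi did not disappear")

When the five minutes elapse with the Get still succeeding, Gomega fails the spec with exactly the timeout text seen in these summaries, even though the virt-handler logs around each failure show the domain shutdown itself being processed.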
• Failure [318.465 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should not update the VirtualMachineInstance spec if Running [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346 Timed out after 300.000s. The vmi did not disappear Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:183 ------------------------------ STEP: Updating the VMI template spec STEP: Expecting the old VirtualMachineInstance spec still running STEP: Restarting the VMI STEP: Stopping the VirtualMachineInstance panic: test timed out after 1h30m0s goroutine 7639 [running]: testing.(*M).startAlarm.func1() /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc created by time.goFunc /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44 goroutine 1 [chan receive, 90 minutes]: testing.(*T).Run(0xc420623e00, 0x139eaef, 0x9, 0x1430f68, 0x4801e6) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301 testing.runTests.func1(0xc420623d10) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64 testing.tRunner(0xc420623d10, 0xc420819df8) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 testing.runTests(0xc42071ada0, 0x1d32a50, 0x1, 0x1, 0x412009) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4 testing.(*M).Run(0xc42063ec80, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171 main.main() _testmain.go:44 +0x151 goroutine 5 [chan receive]: kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1d5e280) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0 /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203 goroutine 6 [syscall, 90 minutes]: os/signal.signal_recv(0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6 os/signal.loop() /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22 created by os/signal.init.0 /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41 goroutine 10 [select]: kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).match(0xc420d1a8c0, 0x14c4ce0, 0x1d7c938, 0x412801, 0xc420572100, 0x1, 0x1, 0xc420572100) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:139 +0x2e6 kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).Should(0xc420d1a8c0, 0x14c4ce0, 0x1d7c938, 0xc420572100, 0x1, 0x1, 0xc420d1a8c0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:48 +0x62 kubevirt.io/kubevirt/tests_test.glob..func13.3.12() /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:429 +0x163 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc42072daa0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113 +0x9c kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc42072daa0, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) 
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:64 +0x13e kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*ItNode).Run(0xc42000cc20, 0x14b6f60, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go:26 +0x7f kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc420662a50, 0x0, 0x14b6f60, 0xc420059500) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:203 +0x648 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc420662a50, 0x14b6f60, 0xc420059500) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:138 +0xff kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc42032c640, 0xc420662a50, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:200 +0x10d kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc42032c640, 0x1) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:170 +0x329 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc42032c640, 0xb) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:66 +0x11b kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc4200ceaf0, 0x7f82bb1d7fd0, 0xc420623e00, 0x13a10d2, 0xb, 0xc42071ade0, 0x2, 0x2, 0x14d3820, 0xc420059500, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x14b7fc0, 0xc420623e00, 0x13a10d2, 0xb, 0xc42071adc0, 0x2, 0x2, 0x2) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:221 +0x258 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x14b7fc0, 0xc420623e00, 0x13a10d2, 0xb, 0xc4204b7fa0, 0x1, 0x1, 0x1) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:209 +0xab kubevirt.io/kubevirt/tests_test.TestTests(0xc420623e00) /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa testing.tRunner(0xc420623e00, 0x1430f68) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 created by testing.(*T).Run /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0 goroutine 11 [chan receive, 90 minutes]: kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc42032c640, 0xc420260180) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:223 +0xd1 created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:60 +0x88 goroutine 12 [select, 90 minutes, locked to thread]: runtime.gopark(0x1433140, 0x0, 0x139b611, 0x6, 0x18, 0x1) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a runtime.selectgo(0xc42047d750, 0xc420260300) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50 runtime.ensureSigM.func1() /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4 runtime.goexit() 
/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1 goroutine 15 [IO wait]: internal/poll.runtime_pollWait(0x7f82bb1c4f00, 0x72, 0xc4203a9850) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57 internal/poll.(*pollDesc).wait(0xc4207b0398, 0x72, 0xffffffffffffff00, 0x14b9180, 0x1c497d0) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b internal/poll.(*pollDesc).waitRead(0xc4207b0398, 0xc42086e000, 0x8000, 0x8000) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d internal/poll.(*FD).Read(0xc4207b0380, 0xc42086e000, 0x8000, 0x8000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d net.(*netFD).Read(0xc4207b0380, 0xc42086e000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f net.(*conn).Read(0xc4201244c0, 0xc42086e000, 0x8000, 0x8000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a crypto/tls.(*block).readFromUntil(0xc4200ff3e0, 0x7f82bb1d7ff0, 0xc4201244c0, 0x5, 0xc4201244c0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96 crypto/tls.(*Conn).readRecord(0xc4203b6e00, 0x1433217, 0xc4203b6f20, 0x20) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0 crypto/tls.(*Conn).Read(0xc4203b6e00, 0xc4206af000, 0x1000, 0x1000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100 bufio.(*Reader).Read(0xc4205416e0, 0xc42078ec78, 0x9, 0x9, 0xc420e0a178, 0xc420ac6d40, 0xc4203a9d10) /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238 io.ReadAtLeast(0x14b5d60, 0xc4205416e0, 0xc42078ec78, 0x9, 0x9, 0x9, 0xc4203a9ce0, 0xc4203a9ce0, 0x406614) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86 io.ReadFull(0x14b5d60, 0xc4205416e0, 0xc42078ec78, 0x9, 0x9, 0xc420e0a120, 0xc4203a9d10, 0xc400002d01) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58 kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc42078ec78, 0x9, 0x9, 0x14b5d60, 0xc4205416e0, 0x0, 0xc400000000, 0x7efa2d, 0xc4203a9fb0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc42078ec40, 0xc42075af60, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4 kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc4203a9fb0, 0x1431ec0, 0xc4204777b0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc4205f0680) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76 created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651 goroutine 7488 [select, 2 minutes]: io.(*pipe).Read(0xc4208130e0, 0xc420ba6000, 0x8000, 0x8000, 0x0, 0x14bb0c0, 0xc420698270) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/pipe.go:50 +0x115 io.(*PipeReader).Read(0xc420125d40, 0xc420ba6000, 0x8000, 0x8000, 0xe, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/pipe.go:127 +0x4c io.copyBuffer(0x14b6220, 0xc420125d98, 0x14b6140, 0xc420125d40, 0xc420ba6000, 0x8000, 0x8000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:400 
+0x164 io.Copy(0x14b6220, 0xc420125d98, 0x14b6140, 0xc420125d40, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:362 +0x5a kubevirt.io/kubevirt/pkg/kubecli.(*wsStreamer).Stream.func1(0xc420125d98, 0x14b6140, 0xc420125d40, 0x14b6160, 0xc420125d88, 0xc4209b9da0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:233 +0x4b created by kubevirt.io/kubevirt/pkg/kubecli.(*wsStreamer).Stream /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:232 +0xda goroutine 7484 [chan receive, 4 minutes]: kubevirt.io/kubevirt/pkg/kubecli.(*wsStreamer).Stream(0xc42069a410, 0x14b6140, 0xc420125d40, 0x14b6160, 0xc420125d88, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:243 +0x17d kubevirt.io/kubevirt/tests.NewConsoleExpecter.func1(0xc4208696e0, 0x14b6280, 0xc42069a410, 0xc420125d40, 0xc420125d88) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1146 +0x61 created by kubevirt.io/kubevirt/tests.NewConsoleExpecter /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1145 +0x401 goroutine 306 [chan send, 84 minutes]: kubevirt.io/kubevirt/tests_test.glob..func23.1.2.1.1(0x14f1520, 0xc4207bdbc0, 0xc4201240e0, 0xc4200498c0, 0xc4204e8b90, 0xc4204e8c18) /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:81 +0x138 created by kubevirt.io/kubevirt/tests_test.glob..func23.1.2.1 /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:73 +0x386 goroutine 3065 [chan receive, 64 minutes]: kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback(0xc42069ba60, 0xc42032cb40, 0xc420130630, 0x0, 0x0, 0x18, 0xc420c87ec8) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:163 +0x32b kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback-fm(0xc42032cb40, 0xc420130630, 0x0, 0x0, 0xc42032cb40, 0xc420130630) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:313 +0x52 kubevirt.io/kubevirt/pkg/kubecli.(*WebsocketRoundTripper).RoundTrip(0xc42069bf40, 0xc420107100, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:142 +0xab kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper.func1(0x14b6240, 0xc42069bf40, 0xc420107100, 0xc420a8b200) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:328 +0x56 created by kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:326 +0x33a goroutine 7485 [chan receive, 4 minutes]: kubevirt.io/kubevirt/tests.NewConsoleExpecter.func2(0xc400000010, 0xc42079ef40) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1156 +0x3c kubevirt.io/kubevirt/vendor/github.com/google/goexpect.(*GExpect).waitForSession(0xc4207a50e0, 0xc4209b9d40, 0xc42069a420, 0x14baa40, 0xc420125d60, 0x14b6140, 0xc420125d80, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/google/goexpect/expect.go:1044 +0x16e created by kubevirt.io/kubevirt/vendor/github.com/google/goexpect.SpawnGeneric /root/go/src/kubevirt.io/kubevirt/vendor/github.com/google/goexpect/expect.go:805 +0x299 goroutine 7483 [chan receive, 4 minutes]: kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback(0xc420b0ef90, 0xc42061b400, 0xc420a51c20, 0x0, 0x0, 0x18, 0xc420d13ec8) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:163 +0x32b kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback-fm(0xc42061b400, 0xc420a51c20, 0x0, 0x0, 0xc42061b400, 0xc420a51c20) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:313 +0x52 kubevirt.io/kubevirt/pkg/kubecli.(*WebsocketRoundTripper).RoundTrip(0xc420b0f4b0, 0xc420b51b00, 0x0, 0x0, 0x0) 
/root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:142 +0xab kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper.func1(0x14b6240, 0xc420b0f4b0, 0xc420b51b00, 0xc4209b99e0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:328 +0x56 created by kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:326 +0x33a goroutine 4628 [chan receive, 46 minutes]: kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback(0xc420968c50, 0xc420e06640, 0xc420a50630, 0x0, 0x0, 0x18, 0xc420ba7ec8) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:163 +0x32b kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback-fm(0xc420e06640, 0xc420a50630, 0x0, 0x0, 0xc420e06640, 0xc420a50630) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:313 +0x52 kubevirt.io/kubevirt/pkg/kubecli.(*WebsocketRoundTripper).RoundTrip(0xc420969c20, 0xc420abcb00, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:142 +0xab kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper.func1(0x14b6240, 0xc420969c20, 0xc420abcb00, 0xc420b52840) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:328 +0x56 created by kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:326 +0x33a goroutine 7487 [select, 2 minutes]: io.(*pipe).Read(0xc420813130, 0xc420bf0000, 0x2000, 0x2000, 0xc420bf0000, 0xa1510c, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/pipe.go:50 +0x115 io.(*PipeReader).Read(0xc420125d80, 0xc420bf0000, 0x2000, 0x2000, 0x1, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/pipe.go:127 +0x4c kubevirt.io/kubevirt/vendor/github.com/google/goexpect.(*GExpect).waitForSession.func2(0x14b6140, 0xc420125d80) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/google/goexpect/expect.go:1020 +0xdb created by kubevirt.io/kubevirt/vendor/github.com/google/goexpect.(*GExpect).waitForSession /root/go/src/kubevirt.io/kubevirt/vendor/github.com/google/goexpect/expect.go:1039 +0x154 goroutine 7489 [IO wait, 2 minutes]: internal/poll.runtime_pollWait(0x7f82bb1c4af0, 0x72, 0xc42098d880) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57 internal/poll.(*pollDesc).wait(0xc420d5aa18, 0x72, 0xffffffffffffff00, 0x14b9180, 0x1c497d0) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b internal/poll.(*pollDesc).waitRead(0xc420d5aa18, 0xc420b62800, 0x800, 0x800) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d internal/poll.(*FD).Read(0xc420d5aa00, 0xc420b62800, 0x800, 0x800, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d net.(*netFD).Read(0xc420d5aa00, 0xc420b62800, 0x800, 0x800, 0x0, 0x8, 0x7fb) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f net.(*conn).Read(0xc420125d90, 0xc420b62800, 0x800, 0x800, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a crypto/tls.(*block).readFromUntil(0xc4207ec600, 0x7f82bb1d7ff0, 0xc420125d90, 0x5, 0xc420125d90, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96 crypto/tls.(*Conn).readRecord(0xc420454000, 0x1433217, 0xc420454120, 0x2) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0 crypto/tls.(*Conn).Read(0xc420454000, 0xc420aca800, 0x2800, 0x2800, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100 
bufio.(*Reader).fill(0xc4209b9ce0) /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:100 +0x11e bufio.(*Reader).Peek(0xc4209b9ce0, 0x2, 0xc4200ac400, 0x13a25d5, 0xc, 0xc42005f800, 0xc42005ea00) /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:132 +0x3a kubevirt.io/kubevirt/vendor/github.com/gorilla/websocket.(*Conn).read(0xc42061b400, 0x2, 0xc42005ea00, 0xc420da0098, 0xc420da00a0, 0xc420820480, 0xc420d28508) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/gorilla/websocket/conn_read.go:12 +0x40 kubevirt.io/kubevirt/vendor/github.com/gorilla/websocket.(*Conn).advanceFrame(0xc42061b400, 0x0, 0x0, 0xc420869680) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/gorilla/websocket/conn.go:780 +0x5c kubevirt.io/kubevirt/vendor/github.com/gorilla/websocket.(*Conn).NextReader(0xc42061b400, 0xc420b9e000, 0x1, 0x8000, 0x1, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/gorilla/websocket/conn.go:940 +0xa3 kubevirt.io/kubevirt/pkg/kubecli.(*BinaryReadWriter).Read(0xc420125d98, 0xc420b9e000, 0x8000, 0x8000, 0x1, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:102 +0x32 io.copyBuffer(0x14b6160, 0xc420125d88, 0x14b6200, 0xc420125d98, 0xc420b9e000, 0x8000, 0x8000, 0x6d6c617574726976, 0x2f73656e69686361, 0x35696d7674736574) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:400 +0x164 io.Copy(0x14b6160, 0xc420125d88, 0x14b6200, 0xc420125d98, 0x657073222c7d2231, 0x6e7572227b3a2263, 0x72743a22676e696e) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:362 +0x5a kubevirt.io/kubevirt/pkg/kubecli.(*wsStreamer).Stream.func2(0x14b6140, 0xc420125d40, 0x14b6160, 0xc420125d88, 0xc420125d98, 0xc4209b9da0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:238 +0x4b created by kubevirt.io/kubevirt/pkg/kubecli.(*wsStreamer).Stream /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:237 +0x137 goroutine 7486 [select, 2 minutes]: kubevirt.io/kubevirt/vendor/github.com/google/goexpect.(*GExpect).waitForSession.func1(0xc420d56110, 0xc4208699e0, 0xc4207a50e0, 0x14baa40, 0xc420125d60) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/google/goexpect/expect.go:1001 +0x109 created by kubevirt.io/kubevirt/vendor/github.com/google/goexpect.(*GExpect).waitForSession /root/go/src/kubevirt.io/kubevirt/vendor/github.com/google/goexpect/expect.go:998 +0xc3 make: *** [functest] Error 2 + make cluster-down ./cluster/down.sh
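The run ends with "panic: test timed out after 1h30m0s": the functional-test binary runs under an overall 90-minute deadline, and once the repeated five-minute waits above exhaust it, Go's testing machinery aborts the process and dumps every goroutine, which is where the long stack listing comes from; make then reports "*** [functest] Error 2" and the log closes with make cluster-down tearing the cluster back down. As a rough sketch of that abort mechanism only (not the actual testing-package source; the 90-minute figure is simply read off the panic message):

package main

import (
	"runtime/debug"
	"time"
)

func main() {
	timeout := 90 * time.Minute
	// Arm a one-shot alarm at startup; if the suite is still running when it
	// fires, switch the traceback mode to "all" so every goroutine is printed,
	// then panic -- producing output of the same shape as the dump above.
	alarm := time.AfterFunc(timeout, func() {
		debug.SetTraceback("all")
		panic("test timed out after " + timeout.String())
	})
	defer alarm.Stop()

	runSuite()
}

// runSuite stands in for the real Ginkgo entry point; here it just blocks
// past the deadline to trigger the alarm.
func runSuite() {
	select {}
}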